From a4290b5f70952a7d41a4509d627e55264a6e23df Mon Sep 17 00:00:00 2001
From: cloudera_vm <cloudera@quickstart.cloudera>
Date: Sat, 18 Feb 2017 09:52:16 -0800
Subject: [PATCH] Q1.iv: 50 reducers + 1 combiner and map output compression

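For reference, a minimal sketch of the driver settings this change implies: 50 reduce
tasks (hence part-r-00000 through part-r-00049 under output_Q1.iiii), a combiner, and
bzip2-compressed map output (hadoop.log below shows the deprecated keys
mapred.compress.map.output / mapred.map.output.compression.codec being translated and a
".bz2" compressor being obtained). This is only an illustration, not the code from
Assign1/src/Question1/Stopword_iiii.java; the stock Hadoop TokenCounterMapper and
IntSumReducer are used as stand-ins for the assignment's actual mapper/reducer.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.compress.BZip2Codec;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;

    // Hypothetical driver illustrating the settings named in the subject line;
    // the real implementation lives in Stopword_iiii.java.
    public class Q1ivDriverSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Compress intermediate map output with bzip2 (non-deprecated keys).
            conf.setBoolean("mapreduce.map.output.compress", true);
            conf.setClass("mapreduce.map.output.compress.codec",
                    BZip2Codec.class, CompressionCodec.class);

            Job job = Job.getInstance(conf, "Q1.iv stopword count");
            job.setJarByClass(Q1ivDriverSketch.class);
            job.setMapperClass(TokenCounterMapper.class);  // stand-in mapper
            job.setCombinerClass(IntSumReducer.class);     // the single combiner class
            job.setReducerClass(IntSumReducer.class);      // stand-in reducer
            job.setNumReduceTasks(50);                     // -> part-r-00000 .. part-r-00049
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);

            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }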
---
 Assign1/hadoop.log                       | 1815 ++++++++++++++++++++++
 Assign1/output_Q1.iiii/._SUCCESS.crc     |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00000.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00001.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00002.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00003.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00004.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00005.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00006.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00007.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00008.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00009.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00010.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00011.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00012.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00013.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00014.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00015.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00016.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00017.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00018.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00019.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00020.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00021.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00022.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00023.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00024.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00025.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00026.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00027.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00028.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00029.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00030.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00031.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00032.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00033.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00034.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00035.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00036.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00037.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00038.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00039.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00040.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00041.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00042.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00043.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00044.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00045.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00046.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00047.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/.part-r-00048.crc |  Bin 0 -> 8 bytes
 Assign1/output_Q1.iiii/.part-r-00049.crc |  Bin 0 -> 12 bytes
 Assign1/output_Q1.iiii/_SUCCESS          |    0
 Assign1/output_Q1.iiii/part-r-00000      |    2 +
 Assign1/output_Q1.iiii/part-r-00001      |    3 +
 Assign1/output_Q1.iiii/part-r-00002      |    0
 Assign1/output_Q1.iiii/part-r-00003      |    1 +
 Assign1/output_Q1.iiii/part-r-00004      |    1 +
 Assign1/output_Q1.iiii/part-r-00005      |    3 +
 Assign1/output_Q1.iiii/part-r-00006      |    0
 Assign1/output_Q1.iiii/part-r-00007      |    3 +
 Assign1/output_Q1.iiii/part-r-00008      |    2 +
 Assign1/output_Q1.iiii/part-r-00009      |    0
 Assign1/output_Q1.iiii/part-r-00010      |    0
 Assign1/output_Q1.iiii/part-r-00011      |    4 +
 Assign1/output_Q1.iiii/part-r-00012      |    4 +
 Assign1/output_Q1.iiii/part-r-00013      |    0
 Assign1/output_Q1.iiii/part-r-00014      |    1 +
 Assign1/output_Q1.iiii/part-r-00015      |    0
 Assign1/output_Q1.iiii/part-r-00016      |    2 +
 Assign1/output_Q1.iiii/part-r-00017      |    1 +
 Assign1/output_Q1.iiii/part-r-00018      |    4 +
 Assign1/output_Q1.iiii/part-r-00019      |    0
 Assign1/output_Q1.iiii/part-r-00020      |    2 +
 Assign1/output_Q1.iiii/part-r-00021      |    0
 Assign1/output_Q1.iiii/part-r-00022      |    3 +
 Assign1/output_Q1.iiii/part-r-00023      |    2 +
 Assign1/output_Q1.iiii/part-r-00024      |    0
 Assign1/output_Q1.iiii/part-r-00025      |    0
 Assign1/output_Q1.iiii/part-r-00026      |    1 +
 Assign1/output_Q1.iiii/part-r-00027      |    1 +
 Assign1/output_Q1.iiii/part-r-00028      |    2 +
 Assign1/output_Q1.iiii/part-r-00029      |    1 +
 Assign1/output_Q1.iiii/part-r-00030      |    2 +
 Assign1/output_Q1.iiii/part-r-00031      |    4 +
 Assign1/output_Q1.iiii/part-r-00032      |    2 +
 Assign1/output_Q1.iiii/part-r-00033      |    1 +
 Assign1/output_Q1.iiii/part-r-00034      |    1 +
 Assign1/output_Q1.iiii/part-r-00035      |    2 +
 Assign1/output_Q1.iiii/part-r-00036      |    3 +
 Assign1/output_Q1.iiii/part-r-00037      |    2 +
 Assign1/output_Q1.iiii/part-r-00038      |    2 +
 Assign1/output_Q1.iiii/part-r-00039      |    1 +
 Assign1/output_Q1.iiii/part-r-00040      |    0
 Assign1/output_Q1.iiii/part-r-00041      |    4 +
 Assign1/output_Q1.iiii/part-r-00042      |    3 +
 Assign1/output_Q1.iiii/part-r-00043      |    2 +
 Assign1/output_Q1.iiii/part-r-00044      |    2 +
 Assign1/output_Q1.iiii/part-r-00045      |    0
 Assign1/output_Q1.iiii/part-r-00046      |    1 +
 Assign1/output_Q1.iiii/part-r-00047      |    1 +
 Assign1/output_Q1.iiii/part-r-00048      |    0
 Assign1/output_Q1.iiii/part-r-00049      |    1 +
 Assign1/output_Q1.iiii/stopwords.csv     |   76 +
 Assign1/output_Q1.iiii/stopwords.csv~    |   77 +
 Assign1/src/Question1/Stopword_iiii.java |   92 ++
 106 files changed, 2137 insertions(+)
 create mode 100644 Assign1/output_Q1.iiii/._SUCCESS.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00000.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00001.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00002.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00003.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00004.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00005.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00006.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00007.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00008.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00009.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00010.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00011.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00012.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00013.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00014.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00015.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00016.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00017.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00018.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00019.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00020.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00021.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00022.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00023.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00024.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00025.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00026.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00027.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00028.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00029.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00030.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00031.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00032.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00033.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00034.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00035.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00036.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00037.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00038.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00039.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00040.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00041.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00042.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00043.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00044.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00045.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00046.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00047.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00048.crc
 create mode 100644 Assign1/output_Q1.iiii/.part-r-00049.crc
 create mode 100644 Assign1/output_Q1.iiii/_SUCCESS
 create mode 100644 Assign1/output_Q1.iiii/part-r-00000
 create mode 100644 Assign1/output_Q1.iiii/part-r-00001
 create mode 100644 Assign1/output_Q1.iiii/part-r-00002
 create mode 100644 Assign1/output_Q1.iiii/part-r-00003
 create mode 100644 Assign1/output_Q1.iiii/part-r-00004
 create mode 100644 Assign1/output_Q1.iiii/part-r-00005
 create mode 100644 Assign1/output_Q1.iiii/part-r-00006
 create mode 100644 Assign1/output_Q1.iiii/part-r-00007
 create mode 100644 Assign1/output_Q1.iiii/part-r-00008
 create mode 100644 Assign1/output_Q1.iiii/part-r-00009
 create mode 100644 Assign1/output_Q1.iiii/part-r-00010
 create mode 100644 Assign1/output_Q1.iiii/part-r-00011
 create mode 100644 Assign1/output_Q1.iiii/part-r-00012
 create mode 100644 Assign1/output_Q1.iiii/part-r-00013
 create mode 100644 Assign1/output_Q1.iiii/part-r-00014
 create mode 100644 Assign1/output_Q1.iiii/part-r-00015
 create mode 100644 Assign1/output_Q1.iiii/part-r-00016
 create mode 100644 Assign1/output_Q1.iiii/part-r-00017
 create mode 100644 Assign1/output_Q1.iiii/part-r-00018
 create mode 100644 Assign1/output_Q1.iiii/part-r-00019
 create mode 100644 Assign1/output_Q1.iiii/part-r-00020
 create mode 100644 Assign1/output_Q1.iiii/part-r-00021
 create mode 100644 Assign1/output_Q1.iiii/part-r-00022
 create mode 100644 Assign1/output_Q1.iiii/part-r-00023
 create mode 100644 Assign1/output_Q1.iiii/part-r-00024
 create mode 100644 Assign1/output_Q1.iiii/part-r-00025
 create mode 100644 Assign1/output_Q1.iiii/part-r-00026
 create mode 100644 Assign1/output_Q1.iiii/part-r-00027
 create mode 100644 Assign1/output_Q1.iiii/part-r-00028
 create mode 100644 Assign1/output_Q1.iiii/part-r-00029
 create mode 100644 Assign1/output_Q1.iiii/part-r-00030
 create mode 100644 Assign1/output_Q1.iiii/part-r-00031
 create mode 100644 Assign1/output_Q1.iiii/part-r-00032
 create mode 100644 Assign1/output_Q1.iiii/part-r-00033
 create mode 100644 Assign1/output_Q1.iiii/part-r-00034
 create mode 100644 Assign1/output_Q1.iiii/part-r-00035
 create mode 100644 Assign1/output_Q1.iiii/part-r-00036
 create mode 100644 Assign1/output_Q1.iiii/part-r-00037
 create mode 100644 Assign1/output_Q1.iiii/part-r-00038
 create mode 100644 Assign1/output_Q1.iiii/part-r-00039
 create mode 100644 Assign1/output_Q1.iiii/part-r-00040
 create mode 100644 Assign1/output_Q1.iiii/part-r-00041
 create mode 100644 Assign1/output_Q1.iiii/part-r-00042
 create mode 100644 Assign1/output_Q1.iiii/part-r-00043
 create mode 100644 Assign1/output_Q1.iiii/part-r-00044
 create mode 100644 Assign1/output_Q1.iiii/part-r-00045
 create mode 100644 Assign1/output_Q1.iiii/part-r-00046
 create mode 100644 Assign1/output_Q1.iiii/part-r-00047
 create mode 100644 Assign1/output_Q1.iiii/part-r-00048
 create mode 100644 Assign1/output_Q1.iiii/part-r-00049
 create mode 100644 Assign1/output_Q1.iiii/stopwords.csv
 create mode 100644 Assign1/output_Q1.iiii/stopwords.csv~
 create mode 100644 Assign1/src/Question1/Stopword_iiii.java

diff --git a/Assign1/hadoop.log b/Assign1/hadoop.log
index 288c3b7..f09b732 100644
--- a/Assign1/hadoop.log
+++ b/Assign1/hadoop.log
@@ -8614,3 +8614,1818 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http
 		Bytes Read=26057874
 	File Output Format Counters 
 		Bytes Written=862
+2017-02-18 09:48:17,921 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:48:18,238 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:48:18,273 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:48:19,566 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:48:19,568 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:48:21,035 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 09:48:21,098 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 09:48:21,373 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 09:48:22,374 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1938545376_0001
+2017-02-18 09:48:23,753 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 09:48:23,754 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1938545376_0001
+2017-02-18 09:48:23,764 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 09:48:23,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:23,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 09:48:24,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 09:48:24,127 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:48:24,295 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:24,379 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:48:24,385 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 09:48:24,758 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 running in uber mode : false
+2017-02-18 09:48:24,761 INFO org.apache.hadoop.mapreduce.Job:  map 0% reduce 0%
+2017-02-18 09:48:25,010 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:48:25,031 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:48:25,032 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:48:25,066 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:48:25,088 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:48:30,384 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:30,850 INFO org.apache.hadoop.mapreduce.Job:  map 7% reduce 0%
+2017-02-18 09:48:33,397 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:33,876 INFO org.apache.hadoop.mapreduce.Job:  map 15% reduce 0%
+2017-02-18 09:48:36,121 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 09:48:36,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:36,888 INFO org.apache.hadoop.mapreduce.Job:  map 22% reduce 0%
+2017-02-18 09:48:39,417 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:42,419 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:45,426 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:48,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.bz2]
+2017-02-18 09:48:48,430 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:51,436 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:54,810 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:48:54,835 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 09:48:54,842 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:48:54,846 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000000_0' done.
+2017-02-18 09:48:54,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:48:54,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:48:54,857 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:54,858 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:48:54,865 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 09:48:55,055 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-02-18 09:48:55,162 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:48:55,164 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:48:55,172 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:48:55,185 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:48:58,333 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 09:48:59,085 INFO org.apache.hadoop.mapreduce.Job:  map 33% reduce 0%
+2017-02-18 09:49:00,871 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:01,095 INFO org.apache.hadoop.mapreduce.Job:  map 56% reduce 0%
+2017-02-18 09:49:03,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:04,061 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:49:04,076 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 09:49:04,081 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000001_0' done.
+2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:04,086 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:04,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:04,094 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:04,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 09:49:04,290 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-02-18 09:49:04,384 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:49:04,405 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:49:04,406 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:49:04,414 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:49:04,416 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:49:06,527 INFO org.apache.hadoop.mapred.LocalJobRunner: 
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:49:07,315 INFO org.apache.hadoop.mapreduce.Job:  map 67% reduce 0%
+2017-02-18 09:49:10,103 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:10,335 INFO org.apache.hadoop.mapreduce.Job:  map 89% reduce 0%
+2017-02-18 09:49:11,103 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:49:11,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 09:49:11,123 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:49:11,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000002_0' done.
+2017-02-18 09:49:11,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,126 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000000_0
+2017-02-18 09:49:11,269 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:11,270 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:11,288 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@639e075e
+2017-02-18 09:49:11,337 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 0%
+2017-02-18 09:49:11,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:11,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:11,571 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2]
+2017-02-18 09:49:11,575 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 70 to MEMORY
+2017-02-18 09:49:11,608 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:11,619 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
+2017-02-18 09:49:11,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:11,642 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:11,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
+2017-02-18 09:49:11,651 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:11,658 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
+2017-02-18 09:49:11,664 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:11,692 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:11,701 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:11,728 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:11,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 95 bytes from disk
+2017-02-18 09:49:11,738 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:11,741 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:11,746 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
+2017-02-18 09:49:11,748 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,796 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 09:49:11,799 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000000_0 is allowed to commit now
+2017-02-18 09:49:11,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000000
+2017-02-18 09:49:11,811 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:11,813 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000000_0' done.
+2017-02-18 09:49:11,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000000_0
+2017-02-18 09:49:11,821 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000001_0
+2017-02-18 09:49:11,832 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:11,833 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:11,834 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@362f58d7
+2017-02-18 09:49:11,840 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:11,854 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:11,863 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 29 len: 79 to MEMORY
+2017-02-18 09:49:11,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 29 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:11,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 29, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->29
+2017-02-18 09:49:11,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 9 len: 53 to MEMORY
+2017-02-18 09:49:11,897 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:11,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 2, commitMemory -> 29, usedMemory ->38
+2017-02-18 09:49:11,905 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 9 len: 49 to MEMORY
+2017-02-18 09:49:11,919 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 3, commitMemory -> 38, usedMemory ->47
+2017-02-18 09:49:11,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:11,929 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:11,930 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
+2017-02-18 09:49:12,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,007 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
+2017-02-18 09:49:12,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,010 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,011 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 40 bytes
+2017-02-18 09:49:12,016 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,051 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000001_0 is allowed to commit now
+2017-02-18 09:49:12,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000001
+2017-02-18 09:49:12,083 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,096 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000001_0' done.
+2017-02-18 09:49:12,097 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000001_0
+2017-02-18 09:49:12,098 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000002_0
+2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,109 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6b121f65
+2017-02-18 09:49:12,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,134 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:12,168 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,176 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:12,183 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,191 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:12,198 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:12,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:12,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,245 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,246 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:12,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,295 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000002_0 is allowed to commit now
+2017-02-18 09:49:12,299 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000002
+2017-02-18 09:49:12,304 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000002_0' done.
+2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000002_0
+2017-02-18 09:49:12,308 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000003_0
+2017-02-18 09:49:12,318 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@533f785b
+2017-02-18 09:49:12,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,344 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-02-18 09:49:12,350 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY
+2017-02-18 09:49:12,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:12,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:12,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,412 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:12,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,424 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,425 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:12,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,477 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk
+2017-02-18 09:49:12,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,480 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,492 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:12,501 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,547 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 09:49:12,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,550 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000003_0 is allowed to commit now
+2017-02-18 09:49:12,552 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000003
+2017-02-18 09:49:12,557 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,560 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000003_0' done.
+2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000003_0
+2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000004_0
+2017-02-18 09:49:12,575 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,577 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,579 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51d934bf
+2017-02-18 09:49:12,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,599 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 56 to MEMORY
+2017-02-18 09:49:12,621 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:12,628 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:12,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:12,641 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:12,648 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:12,656 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,668 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,670 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:12,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,705 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk
+2017-02-18 09:49:12,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,707 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,708 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:12,712 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,744 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 09:49:12,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,756 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000004_0 is allowed to commit now
+2017-02-18 09:49:12,757 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000004
+2017-02-18 09:49:12,763 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,766 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000004_0' done.
+2017-02-18 09:49:12,767 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000004_0
+2017-02-18 09:49:12,770 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000005_0
+2017-02-18 09:49:12,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,778 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,785 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1af653a6
+2017-02-18 09:49:12,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,811 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 81 to MEMORY
+2017-02-18 09:49:12,826 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
+2017-02-18 09:49:12,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 23 len: 69 to MEMORY
+2017-02-18 09:49:12,850 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,856 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->56
+2017-02-18 09:49:12,868 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 23 len: 68 to MEMORY
+2017-02-18 09:49:12,875 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 56, usedMemory ->79
+2017-02-18 09:49:12,881 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 61 bytes
+2017-02-18 09:49:12,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 79 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk
+2017-02-18 09:49:12,927 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,928 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69 bytes
+2017-02-18 09:49:12,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,979 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000005_0 is allowed to commit now
+2017-02-18 09:49:12,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000005
+2017-02-18 09:49:12,987 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,989 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000005_0' done.
+2017-02-18 09:49:12,990 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000005_0
+2017-02-18 09:49:12,991 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000006_0
+2017-02-18 09:49:13,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@685461f1
+2017-02-18 09:49:13,014 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,027 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:13,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,074 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:13,079 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,085 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,098 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,150 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,151 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000006_0 is allowed to commit now
+2017-02-18 09:49:13,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000006
+2017-02-18 09:49:13,217 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,221 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000006_0' done.
+2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000006_0
+2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000007_0
+2017-02-18 09:49:13,234 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,235 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f132bfb
+2017-02-18 09:49:13,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,260 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,270 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 79 to MEMORY
+2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
+2017-02-18 09:49:13,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,314 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->35
+2017-02-18 09:49:13,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37
+2017-02-18 09:49:13,339 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,342 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,343 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
+2017-02-18 09:49:13,352 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 14%
+2017-02-18 09:49:13,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,411 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
+2017-02-18 09:49:13,412 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,458 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 09:49:13,459 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,460 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000007_0 is allowed to commit now
+2017-02-18 09:49:13,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000007
+2017-02-18 09:49:13,474 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,478 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000007_0' done.
+2017-02-18 09:49:13,479 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000007_0
+2017-02-18 09:49:13,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000008_0
+2017-02-18 09:49:13,487 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,488 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@19be1bc9
+2017-02-18 09:49:13,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,527 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 73 to MEMORY
+2017-02-18 09:49:13,551 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,561 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:13,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 59 to MEMORY
+2017-02-18 09:49:13,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->35
+2017-02-18 09:49:13,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 59 to MEMORY
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->47
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 29 bytes
+2017-02-18 09:49:13,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,649 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk
+2017-02-18 09:49:13,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,651 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,656 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes
+2017-02-18 09:49:13,659 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,705 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000008_0 is allowed to commit now
+2017-02-18 09:49:13,708 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000008
+2017-02-18 09:49:13,715 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,722 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000008_0' done.
+2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000008_0
+2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000009_0
+2017-02-18 09:49:13,739 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,740 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,744 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f9623fe
+2017-02-18 09:49:13,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,801 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,812 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,816 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:13,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,832 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:13,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,867 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,875 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:13,880 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,890 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:13,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,913 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,924 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,976 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000009_0 is allowed to commit now
+2017-02-18 09:49:13,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000009
+2017-02-18 09:49:13,990 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,996 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000009_0' done.
+2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000009_0
+2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000010_0
+2017-02-18 09:49:14,014 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,015 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,025 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2926cd3a
+2017-02-18 09:49:14,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,046 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,061 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:14,069 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:14,098 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,106 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:14,110 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:14,112 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:14,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:14,150 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:14,153 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:14,154 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,186 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000010_0 is done. And is in the process of committing
+2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000010_0 is allowed to commit now
+2017-02-18 09:49:14,189 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000010_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000010
+2017-02-18 09:49:14,191 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:14,194 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000010_0' done.
+2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000010_0
+2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000011_0
+2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,203 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@38ea76e6
+2017-02-18 09:49:14,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,232 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 92 to MEMORY
+2017-02-18 09:49:14,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44
+2017-02-18 09:49:14,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 33 len: 81 to MEMORY
+2017-02-18 09:49:14,268 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,271 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->77
+2017-02-18 09:49:14,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 58 to MEMORY
+2017-02-18 09:49:14,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:14,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->90
+2017-02-18 09:49:14,303 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
+2017-02-18 09:49:14,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 90 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:14,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk
+2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:14,341 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 79 bytes
+2017-02-18 09:49:14,351 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,354 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 22%
+2017-02-18 09:49:14,388 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000011_0 is done. And is in the process of committing
+2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000011_0 is allowed to commit now
+2017-02-18 09:49:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000011_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000011
+2017-02-18 09:49:14,395 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:14,397 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000011_0' done.
+2017-02-18 09:49:14,398 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000011_0
+2017-02-18 09:49:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000012_0
+2017-02-18 09:49:14,431 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,432 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3038859d
+2017-02-18 09:49:14,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,475 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,491 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 80 to MEMORY
+2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32
+2017-02-18 09:49:14,531 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 65 to MEMORY
+2017-02-18 09:49:14,540 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->54
+2017-02-18 09:49:14,548 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 60 to MEMORY
+2017-02-18 09:49:14,559 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:14,564 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->66
+2017-02-18 09:49:14,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 48 bytes
+2017-02-18 09:49:14,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 66 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:14,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 110 bytes from disk
+2017-02-18 09:49:14,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:14,604 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:14,605 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 56 bytes
+2017-02-18 09:49:14,609 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000012_0 is done. And is in the process of committing
+2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000012_0 is allowed to commit now
+2017-02-18 09:49:14,651 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000012_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000012
+2017-02-18 09:49:14,656 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000012_0' done.
+2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000012_0
+2017-02-18 09:49:14,660 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000013_0
+2017-02-18 09:49:14,668 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,669 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1498c437
+2017-02-18 09:49:14,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,698 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,715 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:14,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,733 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,737 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:14,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,747 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:14,750 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:14,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:14,775 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:14,782 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:14,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:14,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:14,831 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:14,832 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:14,833 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:14,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,869 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000013_0 is done. And is in the process of committing
+2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000013_0 is allowed to commit now
+2017-02-18 09:49:14,871 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000013_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000013
+2017-02-18 09:49:14,873 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:14,885 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000013_0' done.
+2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000013_0
+2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000014_0
+2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,894 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37250eeb
+2017-02-18 09:49:14,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,920 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,934 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY
+2017-02-18 09:49:14,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:14,971 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:14,997 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:15,024 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:15,025 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:15,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:15,074 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk
+2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:15,081 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:15,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000014_0 is done. And is in the process of committing
+2017-02-18 09:49:15,118 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,119 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000014_0 is allowed to commit now
+2017-02-18 09:49:15,120 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000014_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000014
+2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000014_0' done.
+2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000014_0
+2017-02-18 09:49:15,137 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000015_0
+2017-02-18 09:49:15,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:15,149 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:15,150 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@42005598
+2017-02-18 09:49:15,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:15,189 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:15,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,203 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:15,207 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:15,210 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,228 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:15,239 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:15,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,255 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:15,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:15,263 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:15,265 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:15,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:15,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:15,325 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:15,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:15,335 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:15,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:15,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,356 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 30%
+2017-02-18 09:49:15,382 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000015_0 is done. And is in the process of committing
+2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000015_0 is allowed to commit now
+2017-02-18 09:49:15,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000015_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000015
+2017-02-18 09:49:15,388 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:15,390 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000015_0' done.
+2017-02-18 09:49:15,391 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000015_0
+2017-02-18 09:49:15,392 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000016_0
+2017-02-18 09:49:15,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:15,423 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:15,424 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@562962ff
+2017-02-18 09:49:15,431 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:15,445 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:15,448 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 75 to MEMORY
+2017-02-18 09:49:15,458 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:15,463 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:15,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,491 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:15,495 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:15,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,504 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:15,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:15,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes
+2017-02-18 09:49:15,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:15,568 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 89 bytes from disk
+2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:15,571 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes
+2017-02-18 09:49:15,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,620 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000016_0 is done. And is in the process of committing
+2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000016_0 is allowed to commit now
+2017-02-18 09:49:15,626 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000016_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000016
+2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000016_0' done.
+2017-02-18 09:49:15,636 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000016_0
+2017-02-18 09:49:15,646 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000017_0
+2017-02-18 09:49:15,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7a960b08
+2017-02-18 09:49:15,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:15,670 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:15,690 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 58 to MEMORY
+2017-02-18 09:49:15,706 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:15,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13
+2017-02-18 09:49:15,713 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,720 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:15,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15
+2017-02-18 09:49:15,726 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
+2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,747 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:15,748 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:15,758 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:15,782 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:15,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk
+2017-02-18 09:49:15,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:15,798 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:15,799 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:15,803 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,828 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000017_0 is done. And is in the process of committing
+2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000017_0 is allowed to commit now
+2017-02-18 09:49:15,838 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000017_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000017
+2017-02-18 09:49:15,845 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:15,847 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000017_0' done.
+2017-02-18 09:49:15,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000017_0
+2017-02-18 09:49:15,854 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000018_0
+2017-02-18 09:49:15,860 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a08ec1c
+2017-02-18 09:49:15,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:15,882 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:15,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 40 len: 94 to MEMORY
+2017-02-18 09:49:15,906 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 40 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:15,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 40, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->40
+2017-02-18 09:49:15,913 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 31 len: 77 to MEMORY
+2017-02-18 09:49:15,923 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:15,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 40, usedMemory ->71
+2017-02-18 09:49:15,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 31 len: 80 to MEMORY
+2017-02-18 09:49:15,940 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:15,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 71, usedMemory ->102
+2017-02-18 09:49:15,947 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 84 bytes
+2017-02-18 09:49:15,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 102 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:15,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 137 bytes from disk
+2017-02-18 09:49:15,992 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:15,993 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:15,994 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 92 bytes
+2017-02-18 09:49:15,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,027 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000018_0 is done. And is in the process of committing
+2017-02-18 09:49:16,029 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000018_0 is allowed to commit now
+2017-02-18 09:49:16,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000018_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000018
+2017-02-18 09:49:16,041 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,043 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000018_0' done.
+2017-02-18 09:49:16,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000018_0
+2017-02-18 09:49:16,045 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000019_0
+2017-02-18 09:49:16,051 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,052 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,053 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cac98e5
+2017-02-18 09:49:16,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,082 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,097 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:16,104 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,114 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,124 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:16,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,134 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:16,140 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,142 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,143 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,145 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,183 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:16,185 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,186 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,221 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000019_0 is done. And is in the process of committing
+2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000019_0 is allowed to commit now
+2017-02-18 09:49:16,223 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000019_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000019
+2017-02-18 09:49:16,227 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000019_0' done.
+2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000019_0
+2017-02-18 09:49:16,231 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000020_0
+2017-02-18 09:49:16,244 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@29a3caf2
+2017-02-18 09:49:16,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,267 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,273 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 74 to MEMORY
+2017-02-18 09:49:16,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:16,296 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:16,300 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->34
+2017-02-18 09:49:16,314 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->36
+2017-02-18 09:49:16,334 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 21 bytes
+2017-02-18 09:49:16,361 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 36 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,365 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 40%
+2017-02-18 09:49:16,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 96 bytes from disk
+2017-02-18 09:49:16,375 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,377 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,379 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:16,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,407 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000020_0 is done. And is in the process of committing
+2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000020_0 is allowed to commit now
+2017-02-18 09:49:16,410 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000020_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000020
+2017-02-18 09:49:16,413 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,415 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000020_0' done.
+2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000020_0
+2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000021_0
+2017-02-18 09:49:16,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@610f3309
+2017-02-18 09:49:16,441 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,456 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,465 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,482 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:16,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:16,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,547 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,550 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,558 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,597 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,599 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,634 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000021_0 is done. And is in the process of committing
+2017-02-18 09:49:16,635 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,636 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000021_0 is allowed to commit now
+2017-02-18 09:49:16,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000021_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000021
+2017-02-18 09:49:16,660 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000021_0' done.
+2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000021_0
+2017-02-18 09:49:16,665 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000022_0
+2017-02-18 09:49:16,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,678 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,685 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1906c58a
+2017-02-18 09:49:16,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,705 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,722 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 79 to MEMORY
+2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32
+2017-02-18 09:49:16,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,761 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->34
+2017-02-18 09:49:16,786 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 57 to MEMORY
+2017-02-18 09:49:16,788 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->46
+2017-02-18 09:49:16,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 33 bytes
+2017-02-18 09:49:16,834 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 46 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 99 bytes from disk
+2017-02-18 09:49:16,843 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,847 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,849 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes
+2017-02-18 09:49:16,852 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,880 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000022_0 is done. And is in the process of committing
+2017-02-18 09:49:16,883 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,890 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000022_0 is allowed to commit now
+2017-02-18 09:49:16,894 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000022_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000022
+2017-02-18 09:49:16,899 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,902 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000022_0' done.
+2017-02-18 09:49:16,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000022_0
+2017-02-18 09:49:16,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000023_0
+2017-02-18 09:49:16,914 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@40151a53
+2017-02-18 09:49:16,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,939 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,945 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 70 to MEMORY
+2017-02-18 09:49:16,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,954 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:16,974 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,982 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:16,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,991 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,996 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:16,998 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:17,045 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,047 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk
+2017-02-18 09:49:17,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,051 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,053 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:17,060 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,090 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000023_0 is done. And is in the process of committing
+2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000023_0 is allowed to commit now
+2017-02-18 09:49:17,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000023_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000023
+2017-02-18 09:49:17,097 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000023_0' done.
+2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000023_0
+2017-02-18 09:49:17,101 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000024_0
+2017-02-18 09:49:17,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@9fc1ec5
+2017-02-18 09:49:17,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,137 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,149 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:17,159 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,170 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,174 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:17,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,188 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,211 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:17,212 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,215 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,216 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,299 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000024_0 is done. And is in the process of committing
+2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000024_0 is allowed to commit now
+2017-02-18 09:49:17,302 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000024_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000024
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000024_0' done.
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000024_0
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000025_0
+2017-02-18 09:49:17,322 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,323 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,324 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2793d5d7
+2017-02-18 09:49:17,333 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,344 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,363 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:17,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,374 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-02-18 09:49:17,377 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:17,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,395 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:17,401 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,403 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,406 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:17,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,449 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,473 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,498 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000025_0 is done. And is in the process of committing
+2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000025_0 is allowed to commit now
+2017-02-18 09:49:17,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000025_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000025
+2017-02-18 09:49:17,520 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000025_0' done.
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000025_0
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000026_0
+2017-02-18 09:49:17,535 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,536 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,537 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5413556b
+2017-02-18 09:49:17,545 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,560 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:17,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,586 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,589 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:17,592 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,597 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:17,610 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:17,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
+2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,653 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:17,657 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,693 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000026_0 is done. And is in the process of committing
+2017-02-18 09:49:17,696 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,709 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000026_0 is allowed to commit now
+2017-02-18 09:49:17,710 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000026_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000026
+2017-02-18 09:49:17,711 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,719 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000026_0' done.
+2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000026_0
+2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000027_0
+2017-02-18 09:49:17,729 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cee97a9
+2017-02-18 09:49:17,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,765 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,771 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 60 to MEMORY
+2017-02-18 09:49:17,781 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14
+2017-02-18 09:49:17,788 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,808 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:17,810 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18
+2017-02-18 09:49:17,828 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,830 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,838 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:17,859 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
+2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:17,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,941 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000027_0 is done. And is in the process of committing
+2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000027_0 is allowed to commit now
+2017-02-18 09:49:17,943 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000027_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000027
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000027_0' done.
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000027_0
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000028_0
+2017-02-18 09:49:17,960 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@760bfc86
+2017-02-18 09:49:17,973 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,990 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 19 len: 64 to MEMORY
+2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->19
+2017-02-18 09:49:18,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 10 len: 54 to MEMORY
+2017-02-18 09:49:18,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 2, commitMemory -> 19, usedMemory ->29
+2017-02-18 09:49:18,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 10 len: 55 to MEMORY
+2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 3, commitMemory -> 29, usedMemory ->39
+2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,075 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,076 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:18,125 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 39 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 90 bytes from disk
+2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,133 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,175 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 31 bytes
+2017-02-18 09:49:18,176 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,204 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000028_0 is done. And is in the process of committing
+2017-02-18 09:49:18,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,218 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000028_0 is allowed to commit now
+2017-02-18 09:49:18,219 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000028_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000028
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000028_0' done.
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000028_0
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000029_0
+2017-02-18 09:49:18,230 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,235 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,246 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6cf7a61b
+2017-02-18 09:49:18,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,283 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:18,292 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:18,312 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 13 len: 58 to MEMORY
+2017-02-18 09:49:18,315 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->15
+2017-02-18 09:49:18,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:18,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,329 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
+2017-02-18 09:49:18,331 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,335 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,344 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:18,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,380 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 58%
+2017-02-18 09:49:18,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk
+2017-02-18 09:49:18,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,387 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,388 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:18,394 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,432 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000029_0 is done. And is in the process of committing
+2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000029_0 is allowed to commit now
+2017-02-18 09:49:18,435 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000029_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000029
+2017-02-18 09:49:18,438 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,443 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000029_0' done.
+2017-02-18 09:49:18,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000029_0
+2017-02-18 09:49:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000030_0
+2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,456 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f37d9b5
+2017-02-18 09:49:18,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY
+2017-02-18 09:49:18,495 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
+2017-02-18 09:49:18,502 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 60 to MEMORY
+2017-02-18 09:49:18,508 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->34
+2017-02-18 09:49:18,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 70 to MEMORY
+2017-02-18 09:49:18,519 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->56
+2017-02-18 09:49:18,524 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
+2017-02-18 09:49:18,574 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 56 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
+2017-02-18 09:49:18,580 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,581 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,582 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 46 bytes
+2017-02-18 09:49:18,586 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,619 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000030_0 is done. And is in the process of committing
+2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000030_0 is allowed to commit now
+2017-02-18 09:49:18,623 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000030_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000030
+2017-02-18 09:49:18,626 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000030_0' done.
+2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000030_0
+2017-02-18 09:49:18,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000031_0
+2017-02-18 09:49:18,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,643 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,644 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d2ab9ab
+2017-02-18 09:49:18,651 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,665 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,672 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 43 len: 94 to MEMORY
+2017-02-18 09:49:18,678 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 43 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 43, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->43
+2017-02-18 09:49:18,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:18,690 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,701 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 43, usedMemory ->54
+2017-02-18 09:49:18,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 21 len: 70 to MEMORY
+2017-02-18 09:49:18,712 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 21 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,716 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 21, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->75
+2017-02-18 09:49:18,718 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,720 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,721 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 59 bytes
+2017-02-18 09:49:18,765 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 75 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 124 bytes from disk
+2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,771 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 65 bytes
+2017-02-18 09:49:18,775 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,811 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000031_0 is done. And is in the process of committing
+2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000031_0 is allowed to commit now
+2017-02-18 09:49:18,817 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000031_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000031
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000031_0' done.
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000031_0
+2017-02-18 09:49:18,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000032_0
+2017-02-18 09:49:18,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,842 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,843 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3fb90b1b
+2017-02-18 09:49:18,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,869 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 66 to MEMORY
+2017-02-18 09:49:18,882 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
+2017-02-18 09:49:18,898 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:18,901 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
+2017-02-18 09:49:18,915 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:18,922 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,929 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
+2017-02-18 09:49:18,932 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:18,981 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk
+2017-02-18 09:49:18,987 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,988 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,989 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
+2017-02-18 09:49:18,993 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,028 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000032_0 is done. And is in the process of committing
+2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000032_0 is allowed to commit now
+2017-02-18 09:49:19,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000032_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000032
+2017-02-18 09:49:19,036 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000032_0' done.
+2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000032_0
+2017-02-18 09:49:19,039 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000033_0
+2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,047 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@44aa3f1a
+2017-02-18 09:49:19,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,069 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:19,087 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:19,094 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:19,099 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,115 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:19,125 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:19,155 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,158 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 82 bytes from disk
+2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:19,165 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000033_0 is done. And is in the process of committing
+2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000033_0 is allowed to commit now
+2017-02-18 09:49:19,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000033_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000033
+2017-02-18 09:49:19,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,218 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000033_0' done.
+2017-02-18 09:49:19,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000033_0
+2017-02-18 09:49:19,221 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000034_0
+2017-02-18 09:49:19,226 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4b9eaa86
+2017-02-18 09:49:19,233 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,248 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:19,261 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:19,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,272 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->13
+2017-02-18 09:49:19,278 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,287 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 13, usedMemory ->24
+2017-02-18 09:49:19,288 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,290 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,291 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 12 bytes
+2017-02-18 09:49:19,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 24 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 81 bytes from disk
+2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes
+2017-02-18 09:49:19,344 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,377 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000034_0 is done. And is in the process of committing
+2017-02-18 09:49:19,378 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000034_0 is allowed to commit now
+2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000034_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000034
+2017-02-18 09:49:19,384 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 68%
+2017-02-18 09:49:19,386 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,388 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000034_0' done.
+2017-02-18 09:49:19,389 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000034_0
+2017-02-18 09:49:19,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000035_0
+2017-02-18 09:49:19,400 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,401 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,403 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@23bd25db
+2017-02-18 09:49:19,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,422 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 72 to MEMORY
+2017-02-18 09:49:19,449 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,456 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:19,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,468 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,473 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:19,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,487 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:19,491 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
+2017-02-18 09:49:19,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk
+2017-02-18 09:49:19,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,545 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,547 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
+2017-02-18 09:49:19,550 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,579 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000035_0 is done. And is in the process of committing
+2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000035_0 is allowed to commit now
+2017-02-18 09:49:19,593 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000035_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000035
+2017-02-18 09:49:19,598 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000035_0' done.
+2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000035_0
+2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000036_0
+2017-02-18 09:49:19,613 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,614 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,616 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@f70f500
+2017-02-18 09:49:19,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,636 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,643 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 31 len: 83 to MEMORY
+2017-02-18 09:49:19,653 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->31
+2017-02-18 09:49:19,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 19 len: 65 to MEMORY
+2017-02-18 09:49:19,667 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 2, commitMemory -> 31, usedMemory ->50
+2017-02-18 09:49:19,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 19 len: 67 to MEMORY
+2017-02-18 09:49:19,680 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,684 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 3, commitMemory -> 50, usedMemory ->69
+2017-02-18 09:49:19,685 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,687 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,690 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 54 bytes
+2017-02-18 09:49:19,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 69 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk
+2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,727 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 60 bytes
+2017-02-18 09:49:19,729 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,767 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000036_0 is done. And is in the process of committing
+2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000036_0 is allowed to commit now
+2017-02-18 09:49:19,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000036_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000036
+2017-02-18 09:49:19,773 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,775 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000036_0' done.
+2017-02-18 09:49:19,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000036_0
+2017-02-18 09:49:19,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000037_0
+2017-02-18 09:49:19,783 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,785 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,786 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4624d4d1
+2017-02-18 09:49:19,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,825 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY
+2017-02-18 09:49:19,830 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
+2017-02-18 09:49:19,840 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 55 to MEMORY
+2017-02-18 09:49:19,848 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:19,859 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,865 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 33, usedMemory ->35
+2017-02-18 09:49:19,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 23 bytes
+2017-02-18 09:49:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 35 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,901 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk
+2017-02-18 09:49:19,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,903 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,919 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
+2017-02-18 09:49:19,920 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,958 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000037_0 is done. And is in the process of committing
+2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000037_0 is allowed to commit now
+2017-02-18 09:49:19,961 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000037_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000037
+2017-02-18 09:49:19,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,972 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000037_0' done.
+2017-02-18 09:49:19,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000037_0
+2017-02-18 09:49:19,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000038_0
+2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,985 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d2b79ba
+2017-02-18 09:49:19,993 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:20,012 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:20,020 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 70 to MEMORY
+2017-02-18 09:49:20,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:20,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
+2017-02-18 09:49:20,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,038 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:20,042 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24
+2017-02-18 09:49:20,051 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:20,062 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->26
+2017-02-18 09:49:20,064 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:20,066 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:20,067 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
+2017-02-18 09:49:20,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 26 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:20,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk
+2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:20,098 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
+2017-02-18 09:49:20,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,145 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000038_0 is done. And is in the process of committing
+2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000038_0 is allowed to commit now
+2017-02-18 09:49:20,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000038_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000038
+2017-02-18 09:49:20,151 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000038_0' done.
+2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000038_0
+2017-02-18 09:49:20,154 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000039_0
+2017-02-18 09:49:20,162 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:20,163 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:20,164 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53944f2c
+2017-02-18 09:49:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:20,192 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:20,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 59 to MEMORY
+2017-02-18 09:49:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:20,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:20,220 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,230 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:20,232 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:20,244 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:20,254 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:20,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:20,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 73 bytes from disk
+2017-02-18 09:49:20,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:20,298 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:20,301 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:20,304 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,337 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000039_0 is done. And is in the process of committing
+2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000039_0 is allowed to commit now
+2017-02-18 09:49:20,340 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000039_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000039
+2017-02-18 09:49:20,345 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:20,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000039_0' done.
+2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000039_0
+2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000040_0
+2017-02-18 09:49:20,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:20,362 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:20,369 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cafad79
+2017-02-18 09:49:20,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:20,392 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:20,394 INFO org.apache.hadoop.mapreduce.Job:  map 100% reduce 100%
+2017-02-18 09:49:20,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,406 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:20,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:20,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:20,429 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,445 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:20,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:20,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:20,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:20,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:20,520 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,570 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000040_0 is done. And is in the process of committing
+2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000040_0 is allowed to commit now
+2017-02-18 09:49:20,572 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000040_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000040
+2017-02-18 09:49:20,573 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:20,574 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000040_0' done.
+2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000040_0
+2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000041_0
+2017-02-18 09:49:20,594 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@61da1de
+2017-02-18 09:49:20,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:20,635 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:20,637 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 89 to MEMORY
+2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44
+2017-02-18 09:49:20,647 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 69 to MEMORY
+2017-02-18 09:49:20,664 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:20,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->66
+2017-02-18 09:49:20,666 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:20,687 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:20,698 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 66, usedMemory ->68
+2017-02-18 09:49:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 54 bytes
+2017-02-18 09:49:20,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:20,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 111 bytes from disk
+2017-02-18 09:49:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:20,736 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:20,737 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 57 bytes
+2017-02-18 09:49:20,740 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000041_0 is done. And is in the process of committing
+2017-02-18 09:49:20,777 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000041_0 is allowed to commit now
+2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000041_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000041
+2017-02-18 09:49:20,779 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:20,781 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000041_0' done.
+2017-02-18 09:49:20,787 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000041_0
+2017-02-18 09:49:20,788 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000042_0
+2017-02-18 09:49:20,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:20,796 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:20,798 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@207bfdc3
+2017-02-18 09:49:20,804 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:20,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 34 len: 81 to MEMORY
+2017-02-18 09:49:20,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 34 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:20,846 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 34, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->34
+2017-02-18 09:49:20,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 58 to MEMORY
+2017-02-18 09:49:20,856 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:20,863 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 34, usedMemory ->46
+2017-02-18 09:49:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 69 to MEMORY
+2017-02-18 09:49:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 46, usedMemory ->68
+2017-02-18 09:49:20,879 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 50 bytes
+2017-02-18 09:49:20,893 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:20,906 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk
+2017-02-18 09:49:20,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:20,908 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:20,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 58 bytes
+2017-02-18 09:49:20,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,962 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000042_0 is done. And is in the process of committing
+2017-02-18 09:49:20,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000042_0 is allowed to commit now
+2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000042_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000042
+2017-02-18 09:49:20,965 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:20,968 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000042_0' done.
+2017-02-18 09:49:20,972 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000042_0
+2017-02-18 09:49:20,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000043_0
+2017-02-18 09:49:20,980 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:20,981 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:20,984 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1681a6f6
+2017-02-18 09:49:20,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 71 to MEMORY
+2017-02-18 09:49:21,024 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,029 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:21,036 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,045 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,048 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:21,050 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,055 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,061 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:21,062 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,063 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,064 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:21,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
+2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,105 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:21,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,147 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000043_0 is done. And is in the process of committing
+2017-02-18 09:49:21,148 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000043_0 is allowed to commit now
+2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000043_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000043
+2017-02-18 09:49:21,150 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000043_0' done.
+2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000043_0
+2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000044_0
+2017-02-18 09:49:21,175 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,176 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,177 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3306c9fa
+2017-02-18 09:49:21,182 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 24 len: 73 to MEMORY
+2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->24
+2017-02-18 09:49:21,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 24 len: 71 to MEMORY
+2017-02-18 09:49:21,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,254 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 2, commitMemory -> 24, usedMemory ->48
+2017-02-18 09:49:21,255 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 60 to MEMORY
+2017-02-18 09:49:21,257 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,268 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 48, usedMemory ->61
+2017-02-18 09:49:21,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 40 bytes
+2017-02-18 09:49:21,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 61 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 105 bytes from disk
+2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,317 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 50 bytes
+2017-02-18 09:49:21,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,353 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000044_0 is done. And is in the process of committing
+2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000044_0 is allowed to commit now
+2017-02-18 09:49:21,365 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000044_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000044
+2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000044_0' done.
+2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000044_0
+2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000045_0
+2017-02-18 09:49:21,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,386 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,388 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d01f7c1
+2017-02-18 09:49:21,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:21,451 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,454 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:21,471 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,477 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:21,482 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:21,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:21,523 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,526 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,528 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:21,543 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,578 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000045_0 is done. And is in the process of committing
+2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000045_0 is allowed to commit now
+2017-02-18 09:49:21,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000045_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000045
+2017-02-18 09:49:21,581 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,583 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000045_0' done.
+2017-02-18 09:49:21,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000045_0
+2017-02-18 09:49:21,585 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000046_0
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6a45aa68
+2017-02-18 09:49:21,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,617 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 60 to MEMORY
+2017-02-18 09:49:21,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13
+2017-02-18 09:49:21,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,661 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15
+2017-02-18 09:49:21,667 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,673 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
+2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,740 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,742 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,768 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000046_0 is done. And is in the process of committing
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000046_0 is allowed to commit now
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000046_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000046
+2017-02-18 09:49:21,774 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000046_0' done.
+2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000046_0
+2017-02-18 09:49:21,781 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000047_0
+2017-02-18 09:49:21,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,796 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,797 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2c3c5ba6
+2017-02-18 09:49:21,801 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 62 to MEMORY
+2017-02-18 09:49:21,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,841 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14
+2017-02-18 09:49:21,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,852 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:21,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,864 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18
+2017-02-18 09:49:21,866 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,898 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 76 bytes from disk
+2017-02-18 09:49:21,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,914 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,915 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,918 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,946 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000047_0 is done. And is in the process of committing
+2017-02-18 09:49:21,958 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,967 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000047_0 is allowed to commit now
+2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000047_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000047
+2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,970 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000047_0' done.
+2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000047_0
+2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000048_0
+2017-02-18 09:49:21,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1ce764b2
+2017-02-18 09:49:21,991 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:22,015 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:22,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:22,034 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:22,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:22,051 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:22,067 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,068 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:22,142 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:22,155 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,184 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000048_0 is done. And is in the process of committing
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000048_0 is allowed to commit now
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000048_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000048
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000048_0' done.
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000048_0
+2017-02-18 09:49:22,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000049_0
+2017-02-18 09:49:22,193 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@518f2876
+2017-02-18 09:49:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:22,230 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:22,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 57 to MEMORY
+2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:22,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:22,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:22,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:22,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:22,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:22,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:22,300 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 71 bytes from disk
+2017-02-18 09:49:22,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:22,302 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:22,303 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:22,314 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,356 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000049_0 is done. And is in the process of committing
+2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000049_0 is allowed to commit now
+2017-02-18 09:49:22,359 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000049_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000049
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000049_0' done.
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000049_0
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 09:49:22,639 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1938545376_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 09:49:23,414 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 failed with state FAILED due to: NA
+2017-02-18 09:49:23,878 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=1378254888
+		FILE: Number of bytes written=15479292
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=8043
+		Input split bytes=351
+		Combine input records=4678719
+		Combine output records=131
+		Reduce input groups=77
+		Reduce shuffle bytes=8043
+		Reduce input records=131
+		Reduce output records=77
+		Spilled Records=262
+		Shuffled Maps =150
+		Failed Shuffles=0
+		Merged Map outputs=150
+		GC time elapsed (ms)=866
+		Total committed heap usage (bytes)=10455764992
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters 
+		Bytes Read=26057874
+	File Output Format Counters 
+		Bytes Written=1290
diff --git a/Assign1/output_Q1.iiii/._SUCCESS.crc b/Assign1/output_Q1.iiii/._SUCCESS.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00000.crc b/Assign1/output_Q1.iiii/.part-r-00000.crc
new file mode 100644
index 0000000000000000000000000000000000000000..4af2d9337942b9c1cca8012003bd90cf0b6010ff
GIT binary patch
literal 12
TcmYc;N@ieSU}9*S6`l?N5_<y~

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00001.crc b/Assign1/output_Q1.iiii/.part-r-00001.crc
new file mode 100644
index 0000000000000000000000000000000000000000..d208695bcf93e6ea6ed5714ab0ab0d9e0bead60b
GIT binary patch
literal 12
TcmYc;N@ieSU}Cs)*vb(A6Yc|5

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00002.crc b/Assign1/output_Q1.iiii/.part-r-00002.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00003.crc b/Assign1/output_Q1.iiii/.part-r-00003.crc
new file mode 100644
index 0000000000000000000000000000000000000000..359e99225ab4edee577c1e75c23e244358989f4c
GIT binary patch
literal 12
TcmYc;N@ieSU}9LJ9CRK45$ppa

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00004.crc b/Assign1/output_Q1.iiii/.part-r-00004.crc
new file mode 100644
index 0000000000000000000000000000000000000000..0b630d4e9dbb6fd99d8be5b5e7715be34aeff4f2
GIT binary patch
literal 12
TcmYc;N@ieSU}E^_vwt%H6j=l6

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00005.crc b/Assign1/output_Q1.iiii/.part-r-00005.crc
new file mode 100644
index 0000000000000000000000000000000000000000..d37d501e89aaf31f22c67e2c54ad8d4aca9c3c1a
GIT binary patch
literal 12
TcmYc;N@ieSU}Bh{arZL-66FKK

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00006.crc b/Assign1/output_Q1.iiii/.part-r-00006.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00007.crc b/Assign1/output_Q1.iiii/.part-r-00007.crc
new file mode 100644
index 0000000000000000000000000000000000000000..f2b1a26c9e4c3e22a2c3077ff103aaba21d91bf6
GIT binary patch
literal 12
TcmYc;N@ieSU}Bh?l$!+r5?%ul

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00008.crc b/Assign1/output_Q1.iiii/.part-r-00008.crc
new file mode 100644
index 0000000000000000000000000000000000000000..e94b3d8af26be591bda99b7de227acd7ce92bb5f
GIT binary patch
literal 12
TcmYc;N@ieSU}E6SbWZ{R58nb>

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00009.crc b/Assign1/output_Q1.iiii/.part-r-00009.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00010.crc b/Assign1/output_Q1.iiii/.part-r-00010.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00011.crc b/Assign1/output_Q1.iiii/.part-r-00011.crc
new file mode 100644
index 0000000000000000000000000000000000000000..9ecc7c1d2ea8004c5ed22fd5248c94b5c4f27656
GIT binary patch
literal 12
TcmYc;N@ieSU}9jrv$+QV5()!X

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00012.crc b/Assign1/output_Q1.iiii/.part-r-00012.crc
new file mode 100644
index 0000000000000000000000000000000000000000..ee169fb00eb4be9b2c64bfb738717969be3192b8
GIT binary patch
literal 12
TcmYc;N@ieSU}9KfH*F3860!qV

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00013.crc b/Assign1/output_Q1.iiii/.part-r-00013.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00014.crc b/Assign1/output_Q1.iiii/.part-r-00014.crc
new file mode 100644
index 0000000000000000000000000000000000000000..03cc1ec1922d69d4b650287b92cd186c0f780314
GIT binary patch
literal 12
TcmYc;N@ieSU}9KvCW{#W6Kew?

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00015.crc b/Assign1/output_Q1.iiii/.part-r-00015.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00016.crc b/Assign1/output_Q1.iiii/.part-r-00016.crc
new file mode 100644
index 0000000000000000000000000000000000000000..63e90b294bfb8b5540cafe8960da2f9672e4735d
GIT binary patch
literal 12
TcmYc;N@ieSU}8}8T*wIk5Gn#s

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00017.crc b/Assign1/output_Q1.iiii/.part-r-00017.crc
new file mode 100644
index 0000000000000000000000000000000000000000..7e33850b4dc08df319c69209794071ca88170e52
GIT binary patch
literal 12
TcmYc;N@ieSU}Ct!XQ%-H5rhJE

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00018.crc b/Assign1/output_Q1.iiii/.part-r-00018.crc
new file mode 100644
index 0000000000000000000000000000000000000000..efff1aa87faca13b678ce4bb9253d071905c75ad
GIT binary patch
literal 12
TcmYc;N@ieSU}8A3_S8%O6vPA%

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00019.crc b/Assign1/output_Q1.iiii/.part-r-00019.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00020.crc b/Assign1/output_Q1.iiii/.part-r-00020.crc
new file mode 100644
index 0000000000000000000000000000000000000000..69eb5c6c7a4ee70d4e690a7780aaa6e7502edb26
GIT binary patch
literal 12
TcmYc;N@ieSU}89wH%Ar#6DI>C

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00021.crc b/Assign1/output_Q1.iiii/.part-r-00021.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00022.crc b/Assign1/output_Q1.iiii/.part-r-00022.crc
new file mode 100644
index 0000000000000000000000000000000000000000..138e38fba549e42cfe7ac0acf8fa6d55fd864e61
GIT binary patch
literal 12
TcmYc;N@ieSU}C7QPudCq5@G{P

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00023.crc b/Assign1/output_Q1.iiii/.part-r-00023.crc
new file mode 100644
index 0000000000000000000000000000000000000000..10b80b09b9d7719d889b4fd7e53f700bce5ba9a7
GIT binary patch
literal 12
TcmYc;N@ieSU}A7yFfkVZ5&Hui

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00024.crc b/Assign1/output_Q1.iiii/.part-r-00024.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00025.crc b/Assign1/output_Q1.iiii/.part-r-00025.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00026.crc b/Assign1/output_Q1.iiii/.part-r-00026.crc
new file mode 100644
index 0000000000000000000000000000000000000000..80384bc1b358b9c7ad34e29457ac816a9efb5509
GIT binary patch
literal 12
TcmYc;N@ieSU}D&R;E^Q&6uAS{

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00027.crc b/Assign1/output_Q1.iiii/.part-r-00027.crc
new file mode 100644
index 0000000000000000000000000000000000000000..757be8ac77d9428a78aca8b6ea0c18fac9582545
GIT binary patch
literal 12
TcmYc;N@ieSU}E^m8tMoD5?%t&

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00028.crc b/Assign1/output_Q1.iiii/.part-r-00028.crc
new file mode 100644
index 0000000000000000000000000000000000000000..d7741a04be0274915853dc47ae380d725bcc95d2
GIT binary patch
literal 12
TcmYc;N@ieSU}BJ8*%=K05o7}A

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00029.crc b/Assign1/output_Q1.iiii/.part-r-00029.crc
new file mode 100644
index 0000000000000000000000000000000000000000..dfe9a055c0fdb1bb85b420e16d14332009bc19ef
GIT binary patch
literal 12
TcmYc;N@ieSU}BhUb|n=65}N}>

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00030.crc b/Assign1/output_Q1.iiii/.part-r-00030.crc
new file mode 100644
index 0000000000000000000000000000000000000000..df52b787979f662ff8d0ad7e5466e8ad5f2fb6b0
GIT binary patch
literal 12
TcmYc;N@ieSU}7+vlOYKJ5fuWC

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00031.crc b/Assign1/output_Q1.iiii/.part-r-00031.crc
new file mode 100644
index 0000000000000000000000000000000000000000..da43959170ef5ab231b0b549f74b0a854793734a
GIT binary patch
literal 12
TcmYc;N@ieSU}A_q-0&U%6C?x3

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00032.crc b/Assign1/output_Q1.iiii/.part-r-00032.crc
new file mode 100644
index 0000000000000000000000000000000000000000..9906816314100e07bc4b414f4beb6c22bdcec1cf
GIT binary patch
literal 12
TcmYc;N@ieSU}9LvkSq@X5a|M5

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00033.crc b/Assign1/output_Q1.iiii/.part-r-00033.crc
new file mode 100644
index 0000000000000000000000000000000000000000..0afa14388f27331eb180ef7a555f53c7d27a19ac
GIT binary patch
literal 12
TcmYc;N@ieSU}CUno$Lny5n}?`

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00034.crc b/Assign1/output_Q1.iiii/.part-r-00034.crc
new file mode 100644
index 0000000000000000000000000000000000000000..1fa259401f9008ef4ec9b3813fdcf0b96d1306e4
GIT binary patch
literal 12
TcmYc;N@ieSU}DJIr5FPM5(EO^

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00035.crc b/Assign1/output_Q1.iiii/.part-r-00035.crc
new file mode 100644
index 0000000000000000000000000000000000000000..039f0d9cabe8608edf1a58bfed2a30f26066e274
GIT binary patch
literal 12
TcmYc;N@ieSU}A8~J}d_S5q$#9

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00036.crc b/Assign1/output_Q1.iiii/.part-r-00036.crc
new file mode 100644
index 0000000000000000000000000000000000000000..ef7e140ccaf5ec2cbef026a9dc714e7c7e25ff15
GIT binary patch
literal 12
TcmYc;N@ieSU}7laPrnEN5hMcm

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00037.crc b/Assign1/output_Q1.iiii/.part-r-00037.crc
new file mode 100644
index 0000000000000000000000000000000000000000..2e4ef49a69a3b541840d0810690627cd47db6aa7
GIT binary patch
literal 12
TcmYc;N@ieSU}A{5yZt5r6V3z#

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00038.crc b/Assign1/output_Q1.iiii/.part-r-00038.crc
new file mode 100644
index 0000000000000000000000000000000000000000..9ca82ee332f813060074efc13dd48d4477b9fa48
GIT binary patch
literal 12
TcmYc;N@ieSU}E4`mz@d#4*&uq

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00039.crc b/Assign1/output_Q1.iiii/.part-r-00039.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3db6bfc4054b5537c6108003904632990b64848c
GIT binary patch
literal 12
TcmYc;N@ieSU}E?m_+mW(6z&8f

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00040.crc b/Assign1/output_Q1.iiii/.part-r-00040.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00041.crc b/Assign1/output_Q1.iiii/.part-r-00041.crc
new file mode 100644
index 0000000000000000000000000000000000000000..e9918b91b0fe485bdc6e28c117731272634c903a
GIT binary patch
literal 12
TcmYc;N@ieSU}BhM`i~O;5{v^8

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00042.crc b/Assign1/output_Q1.iiii/.part-r-00042.crc
new file mode 100644
index 0000000000000000000000000000000000000000..94843cd0ac7bef9ebe9303ae1caf1dd41861d861
GIT binary patch
literal 12
TcmYc;N@ieSU}AWA_iHQw76JrQ

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00043.crc b/Assign1/output_Q1.iiii/.part-r-00043.crc
new file mode 100644
index 0000000000000000000000000000000000000000..0985b05e3324189227f0de2ef72c4ea8996cc5fb
GIT binary patch
literal 12
TcmYc;N@ieSU}8948xaQp6CndN

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00044.crc b/Assign1/output_Q1.iiii/.part-r-00044.crc
new file mode 100644
index 0000000000000000000000000000000000000000..12f27511e5f4e0ee0c26c115b3021f6338e7c675
GIT binary patch
literal 12
TcmYc;N@ieSU}DfPE0PBQ55)p1

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00045.crc b/Assign1/output_Q1.iiii/.part-r-00045.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00046.crc b/Assign1/output_Q1.iiii/.part-r-00046.crc
new file mode 100644
index 0000000000000000000000000000000000000000..fc40c340e9e051dc268abae7ac182475fd0f2eae
GIT binary patch
literal 12
TcmYc;N@ieSU}9Lm`O!516z>F8

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00047.crc b/Assign1/output_Q1.iiii/.part-r-00047.crc
new file mode 100644
index 0000000000000000000000000000000000000000..0eef28dbf3983408777a0ed5bce49a288439dd63
GIT binary patch
literal 12
TcmYc;N@ieSU}7+5DLe}R5ElZp

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00048.crc b/Assign1/output_Q1.iiii/.part-r-00048.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
GIT binary patch
literal 8
PcmYc;N@ieSU}69O2$TUk

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/.part-r-00049.crc b/Assign1/output_Q1.iiii/.part-r-00049.crc
new file mode 100644
index 0000000000000000000000000000000000000000..5bc8ff7b228e689b5c6cd36f600734ef5aa38826
GIT binary patch
literal 12
TcmYc;N@ieSU}EUVF6#gQ5>*2%

literal 0
HcmV?d00001

diff --git a/Assign1/output_Q1.iiii/_SUCCESS b/Assign1/output_Q1.iiii/_SUCCESS
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00000 b/Assign1/output_Q1.iiii/part-r-00000
new file mode 100644
index 0000000..e3b9ee5
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00000
@@ -0,0 +1,2 @@
+be	27239
+up	7138
diff --git a/Assign1/output_Q1.iiii/part-r-00001 b/Assign1/output_Q1.iiii/part-r-00001
new file mode 100644
index 0000000..ebd1876
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00001
@@ -0,0 +1,3 @@
+	202317
+upon	4160
+we	13668
diff --git a/Assign1/output_Q1.iiii/part-r-00002 b/Assign1/output_Q1.iiii/part-r-00002
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00003 b/Assign1/output_Q1.iiii/part-r-00003
new file mode 100644
index 0000000..1b831a2
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00003
@@ -0,0 +1 @@
+our	4088
diff --git a/Assign1/output_Q1.iiii/part-r-00004 b/Assign1/output_Q1.iiii/part-r-00004
new file mode 100644
index 0000000..753a241
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00004
@@ -0,0 +1 @@
+of	119701
diff --git a/Assign1/output_Q1.iiii/part-r-00005 b/Assign1/output_Q1.iiii/part-r-00005
new file mode 100644
index 0000000..de84bdb
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00005
@@ -0,0 +1,3 @@
+his	34380
+out	7891
+with	34665
diff --git a/Assign1/output_Q1.iiii/part-r-00006 b/Assign1/output_Q1.iiii/part-r-00006
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00007 b/Assign1/output_Q1.iiii/part-r-00007
new file mode 100644
index 0000000..5724628
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00007
@@ -0,0 +1,3 @@
+from	9433
+has	5077
+its	4523
diff --git a/Assign1/output_Q1.iiii/part-r-00008 b/Assign1/output_Q1.iiii/part-r-00008
new file mode 100644
index 0000000..d3c47da
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00008
@@ -0,0 +1,2 @@
+not	32386
+said	5039
diff --git a/Assign1/output_Q1.iiii/part-r-00009 b/Assign1/output_Q1.iiii/part-r-00009
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00010 b/Assign1/output_Q1.iiii/part-r-00010
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00011 b/Assign1/output_Q1.iiii/part-r-00011
new file mode 100644
index 0000000..a428884
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00011
@@ -0,0 +1,4 @@
+have	23991
+my	22712
+them	5581
+will	11037
diff --git a/Assign1/output_Q1.iiii/part-r-00012 b/Assign1/output_Q1.iiii/part-r-00012
new file mode 100644
index 0000000..a21dd8f
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00012
@@ -0,0 +1,4 @@
+but	30329
+on	12904
+then	6525
+thy	4028
diff --git a/Assign1/output_Q1.iiii/part-r-00013 b/Assign1/output_Q1.iiii/part-r-00013
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00014 b/Assign1/output_Q1.iiii/part-r-00014
new file mode 100644
index 0000000..57c92b2
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00014
@@ -0,0 +1 @@
+all	11971
diff --git a/Assign1/output_Q1.iiii/part-r-00015 b/Assign1/output_Q1.iiii/part-r-00015
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00016 b/Assign1/output_Q1.iiii/part-r-00016
new file mode 100644
index 0000000..b3df306
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00016
@@ -0,0 +1,2 @@
+could	6614
+or	8610
diff --git a/Assign1/output_Q1.iiii/part-r-00017 b/Assign1/output_Q1.iiii/part-r-00017
new file mode 100644
index 0000000..a59f932
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00017
@@ -0,0 +1 @@
+what	5911
diff --git a/Assign1/output_Q1.iiii/part-r-00018 b/Assign1/output_Q1.iiii/part-r-00018
new file mode 100644
index 0000000..13bbe30
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00018
@@ -0,0 +1,4 @@
+and	167100
+for	36130
+if	7613
+to	114272
diff --git a/Assign1/output_Q1.iiii/part-r-00019 b/Assign1/output_Q1.iiii/part-r-00019
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00020 b/Assign1/output_Q1.iiii/part-r-00020
new file mode 100644
index 0000000..3e43901
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00020
@@ -0,0 +1,2 @@
+about	6121
+by	15659
diff --git a/Assign1/output_Q1.iiii/part-r-00021 b/Assign1/output_Q1.iiii/part-r-00021
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00022 b/Assign1/output_Q1.iiii/part-r-00022
new file mode 100644
index 0000000..5d96b74
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00022
@@ -0,0 +1,3 @@
+do	5427
+like	4217
+was	49624
diff --git a/Assign1/output_Q1.iiii/part-r-00023 b/Assign1/output_Q1.iiii/part-r-00023
new file mode 100644
index 0000000..73404a0
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00023
@@ -0,0 +1,2 @@
+one	9926
+they	14089
diff --git a/Assign1/output_Q1.iiii/part-r-00024 b/Assign1/output_Q1.iiii/part-r-00024
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00025 b/Assign1/output_Q1.iiii/part-r-00025
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00026 b/Assign1/output_Q1.iiii/part-r-00026
new file mode 100644
index 0000000..2c9d461
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00026
@@ -0,0 +1 @@
+in	69179
diff --git a/Assign1/output_Q1.iiii/part-r-00027 b/Assign1/output_Q1.iiii/part-r-00027
new file mode 100644
index 0000000..3630772
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00027
@@ -0,0 +1 @@
+other	4002
diff --git a/Assign1/output_Q1.iiii/part-r-00028 b/Assign1/output_Q1.iiii/part-r-00028
new file mode 100644
index 0000000..3a40b4e
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00028
@@ -0,0 +1,2 @@
+a	99209
+an	8104
diff --git a/Assign1/output_Q1.iiii/part-r-00029 b/Assign1/output_Q1.iiii/part-r-00029
new file mode 100644
index 0000000..1cf7342
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00029
@@ -0,0 +1 @@
+thou	5138
diff --git a/Assign1/output_Q1.iiii/part-r-00030 b/Assign1/output_Q1.iiii/part-r-00030
new file mode 100644
index 0000000..9f45bbf
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00030
@@ -0,0 +1,2 @@
+her	21272
+you	35121
diff --git a/Assign1/output_Q1.iiii/part-r-00031 b/Assign1/output_Q1.iiii/part-r-00031
new file mode 100644
index 0000000..96a0006
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00031
@@ -0,0 +1,4 @@
+got	4047
+is	37433
+she	19935
+their	6428
diff --git a/Assign1/output_Q1.iiii/part-r-00032 b/Assign1/output_Q1.iiii/part-r-00032
new file mode 100644
index 0000000..f055a48
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00032
@@ -0,0 +1,2 @@
+it	50587
+no	7952
diff --git a/Assign1/output_Q1.iiii/part-r-00033 b/Assign1/output_Q1.iiii/part-r-00033
new file mode 100644
index 0000000..415d969
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00033
@@ -0,0 +1 @@
+as	29531
diff --git a/Assign1/output_Q1.iiii/part-r-00034 b/Assign1/output_Q1.iiii/part-r-00034
new file mode 100644
index 0000000..cb3b273
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00034
@@ -0,0 +1 @@
+at	18706
diff --git a/Assign1/output_Q1.iiii/part-r-00035 b/Assign1/output_Q1.iiii/part-r-00035
new file mode 100644
index 0000000..07fb56b
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00035
@@ -0,0 +1,2 @@
+when	8507
+who	4428
diff --git a/Assign1/output_Q1.iiii/part-r-00036 b/Assign1/output_Q1.iiii/part-r-00036
new file mode 100644
index 0000000..fe554d4
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00036
@@ -0,0 +1,3 @@
+he	44747
+i	72836
+would	9567
diff --git a/Assign1/output_Q1.iiii/part-r-00037 b/Assign1/output_Q1.iiii/part-r-00037
new file mode 100644
index 0000000..7c7e486
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00037
@@ -0,0 +1,2 @@
+so	13588
+some	4462
diff --git a/Assign1/output_Q1.iiii/part-r-00038 b/Assign1/output_Q1.iiii/part-r-00038
new file mode 100644
index 0000000..af5fd3c
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00038
@@ -0,0 +1,2 @@
+it.	4557
+see	4020
diff --git a/Assign1/output_Q1.iiii/part-r-00039 b/Assign1/output_Q1.iiii/part-r-00039
new file mode 100644
index 0000000..afba8a9
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00039
@@ -0,0 +1 @@
+any	5067
diff --git a/Assign1/output_Q1.iiii/part-r-00040 b/Assign1/output_Q1.iiii/part-r-00040
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00041 b/Assign1/output_Q1.iiii/part-r-00041
new file mode 100644
index 0000000..a1d0ad9
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00041
@@ -0,0 +1,4 @@
+been	6037
+down	4100
+me	11514
+this	18019
diff --git a/Assign1/output_Q1.iiii/part-r-00042 b/Assign1/output_Q1.iiii/part-r-00042
new file mode 100644
index 0000000..be87ec3
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00042
@@ -0,0 +1,3 @@
+had	24180
+the	208421
+which	7409
diff --git a/Assign1/output_Q1.iiii/part-r-00043 b/Assign1/output_Q1.iiii/part-r-00043
new file mode 100644
index 0000000..192f175
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00043
@@ -0,0 +1,2 @@
+are	7566
+into	4263
diff --git a/Assign1/output_Q1.iiii/part-r-00044 b/Assign1/output_Q1.iiii/part-r-00044
new file mode 100644
index 0000000..505bc88
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00044
@@ -0,0 +1,2 @@
+that	54938
+your	11160
diff --git a/Assign1/output_Q1.iiii/part-r-00045 b/Assign1/output_Q1.iiii/part-r-00045
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00046 b/Assign1/output_Q1.iiii/part-r-00046
new file mode 100644
index 0000000..954b8ca
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00046
@@ -0,0 +1 @@
+were	9682
diff --git a/Assign1/output_Q1.iiii/part-r-00047 b/Assign1/output_Q1.iiii/part-r-00047
new file mode 100644
index 0000000..7b099e1
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00047
@@ -0,0 +1 @@
+there	8909
diff --git a/Assign1/output_Q1.iiii/part-r-00048 b/Assign1/output_Q1.iiii/part-r-00048
new file mode 100644
index 0000000..e69de29
diff --git a/Assign1/output_Q1.iiii/part-r-00049 b/Assign1/output_Q1.iiii/part-r-00049
new file mode 100644
index 0000000..a6a9eee
--- /dev/null
+++ b/Assign1/output_Q1.iiii/part-r-00049
@@ -0,0 +1 @@
+him	8425
diff --git a/Assign1/output_Q1.iiii/stopwords.csv b/Assign1/output_Q1.iiii/stopwords.csv
new file mode 100644
index 0000000..22090ca
--- /dev/null
+++ b/Assign1/output_Q1.iiii/stopwords.csv
@@ -0,0 +1,76 @@
+be
+up
+upon
+we
+our
+of
+his
+out
+with
+from
+has
+its
+not
+said
+have
+my
+them
+will
+but
+on
+then
+thy
+all
+could
+or
+what
+and
+for
+if
+to
+about
+by
+do
+like
+was
+one
+they
+in
+other
+a
+an
+thou
+her
+you
+got
+is
+she
+their
+it
+no
+as
+at
+when
+who
+he
+i
+would
+so
+some
+it.
+see
+any
+been
+down
+me
+this
+had
+the
+which
+are
+into
+that
+your
+were
+there
+him
diff --git a/Assign1/output_Q1.iiii/stopwords.csv~ b/Assign1/output_Q1.iiii/stopwords.csv~
new file mode 100644
index 0000000..17663cc
--- /dev/null
+++ b/Assign1/output_Q1.iiii/stopwords.csv~
@@ -0,0 +1,77 @@
+be
+up
+
+upon
+we
+our
+of
+his
+out
+with
+from
+has
+its
+not
+said
+have
+my
+them
+will
+but
+on
+then
+thy
+all
+could
+or
+what
+and
+for
+if
+to
+about
+by
+do
+like
+was
+one
+they
+in
+other
+a
+an
+thou
+her
+you
+got
+is
+she
+their
+it
+no
+as
+at
+when
+who
+he
+i
+would
+so
+some
+it.
+see
+any
+been
+down
+me
+this
+had
+the
+which
+are
+into
+that
+your
+were
+there
+him
diff --git a/Assign1/src/Question1/Stopword_iiii.java b/Assign1/src/Question1/Stopword_iiii.java
new file mode 100644
index 0000000..6bb89fb
--- /dev/null
+++ b/Assign1/src/Question1/Stopword_iiii.java
@@ -0,0 +1,102 @@
+package Question1;
+
+import java.io.IOException;
+import java.util.Arrays;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.BZip2Codec;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+
+public class Stopword_iiii extends Configured implements Tool {
+   public static void main(String[] args) throws Exception {
+      System.out.println(Arrays.toString(args));
+      int res = ToolRunner.run(new Configuration(), new Stopword_iiii(), args);
+      
+      System.exit(res);
+   }
+
+   @Override
+   public int run(String[] args) throws Exception {
+      System.out.println(Arrays.toString(args));
+      Job job = Job.getInstance(getConf(), "Stopword_iiii");
+      job.setJarByClass(Stopword_iiii.class);
+      job.setOutputKeyClass(Text.class);
+      job.setOutputValueClass(IntWritable.class);
+
+      job.setMapperClass(Map.class);
+      job.setReducerClass(Reduce.class);
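+      // The reducer is reused as the combiner; note that its sum > 4000 filter
+      // also runs on per-mapper partial sums, so low partial counts are dropped early.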
+      job.setCombinerClass(Reduce.class);
+      
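+      // Legacy property names; on Hadoop 2 the equivalents are
+      // mapreduce.map.output.compress and mapreduce.map.output.compress.codec.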
+      job.getConfiguration().setBoolean("mapred.compress.map.output",
+            true);
+      job.getConfiguration().setClass("mapred.map.output.compression.codec",
+            BZip2Codec.class, CompressionCodec.class);
+
+      job.setInputFormatClass(TextInputFormat.class);
+      job.setOutputFormatClass(TextOutputFormat.class);
+      
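+      // 50 reduce tasks -> 50 part-r-* output files; partitions with no
+      // surviving keys come out empty.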
+      job.setNumReduceTasks(50);
+      
+
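+      // Expects four arguments: three input paths followed by the output directory.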
+      FileInputFormat.addInputPath(job, new Path(args[0]));
+      FileInputFormat.addInputPath(job, new Path(args[1]));
+      FileInputFormat.addInputPath(job, new Path(args[2]));
+      FileOutputFormat.setOutputPath(job, new Path(args[3]));
+
+
+      boolean ok = job.waitForCompletion(true);
+
+      return ok ? 0 : 1;
+   }
+   
+   public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
+      private final static IntWritable ONE = new IntWritable(1);
+      private Text word = new Text();
+
+      @Override
+      public void map(LongWritable key, Text value, Context context)
+              throws IOException, InterruptedException {
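+         // Whitespace-only split with lower-casing; punctuation stays attached,
+         // so e.g. "it." is counted separately from "it".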
+         for (String token: value.toString().split("\\s+")) {
+            word.set(token.toLowerCase());
+            context.write(word, ONE);
+         }
+      }
+   }
+
+   public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
+      @Override
+      public void reduce(Text key, Iterable<IntWritable> values, Context context)
+              throws IOException, InterruptedException {
+         int sum = 0;
+         for (IntWritable val : values) {
+            sum += val.get();
+         }
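+         // Keep only tokens seen more than 4000 times (the stop-word cutoff).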
+         if (sum > 4000) {
+            context.write(key, new IntWritable(sum));
+         }
+      }
+   }
+}
-- 
GitLab