diff --git a/Assign1/bin/.gitignore b/Assign1/bin/.gitignore index 5085f4521a6a5c57b94e1b74e8386a48b4796edd..155baa89c3df79ca7484e860bfc67749ef28d603 100644 --- a/Assign1/bin/.gitignore +++ b/Assign1/bin/.gitignore @@ -1 +1,4 @@ /Question1/ +/Question2/ +/StubDriver.class +/StubMapper.class diff --git a/Assign1/hadoop.log b/Assign1/hadoop.log index f09b73211fa24900a16d62450472f5fe287d1749..6abb75f392aa5728aa099b72b12dcd64bdf47b34 100644 --- a/Assign1/hadoop.log +++ b/Assign1/hadoop.log @@ -1,4810 +1,199 @@ -2017-02-18 02:36:22,445 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 02:36:24,389 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 02:36:24,424 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 02:37:48,329 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 02:37:49,998 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 02:37:50,000 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 02:37:51,700 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). -2017-02-18 02:37:51,741 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 -2017-02-18 02:37:52,081 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 -2017-02-18 02:37:53,299 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1560003292_0001 -2017-02-18 02:37:54,908 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 02:37:54,909 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1560003292_0001 -2017-02-18 02:37:54,923 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 02:37:54,983 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 02:37:54,991 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 02:37:55,317 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 02:37:55,321 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1560003292_0001_m_000000_0 -2017-02-18 02:37:55,573 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 02:37:55,691 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 02:37:55,717 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/Assign1/pg100.txt:0+5589886 -2017-02-18 02:37:56,108 INFO org.apache.hadoop.mapreduce.Job: Job job_local1560003292_0001 running in uber mode : false -2017-02-18 02:37:56,130 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 02:37:56,758 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 02:37:56,760 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 02:37:56,761 INFO 
org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 02:37:56,761 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 02:37:56,761 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 02:37:56,794 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 02:38:01,693 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 02:38:02,182 INFO org.apache.hadoop.mapreduce.Job: map 48% reduce 0% -2017-02-18 02:38:02,562 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 02:38:02,565 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 02:38:02,566 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 02:38:02,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 -2017-02-18 02:38:02,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 -2017-02-18 02:38:04,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 02:38:05,194 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 02:38:07,715 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 02:38:10,719 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 02:38:11,523 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 02:38:11,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1560003292_0001_m_000000_0 is done. And is in the process of committing -2017-02-18 02:38:11,598 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 02:38:11,605 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1560003292_0001_m_000000_0' done. -2017-02-18 02:38:11,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1560003292_0001_m_000000_0 -2017-02-18 02:38:11,611 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
-2017-02-18 02:38:11,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 02:38:11,631 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1560003292_0001_r_000000_0 -2017-02-18 02:38:11,696 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 02:38:11,696 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 02:38:11,724 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7b948a59 -2017-02-18 02:38:11,858 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 02:38:11,897 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1560003292_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 02:38:12,182 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1560003292_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY -2017-02-18 02:38:12,220 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 02:38:12,391 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local1560003292_0001_m_000000_0 -2017-02-18 02:38:12,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 -2017-02-18 02:38:12,427 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 02:38:12,428 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 02:38:12,428 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 02:38:12,535 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 02:38:12,553 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 02:38:16,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit -2017-02-18 02:38:16,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk -2017-02-18 02:38:16,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 02:38:16,461 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 02:38:16,469 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 02:38:16,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 02:38:16,598 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. 
Instead, use mapreduce.job.skiprecords -2017-02-18 02:38:17,735 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 02:38:18,244 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 68% -2017-02-18 02:38:20,745 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 02:38:21,257 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 88% -2017-02-18 02:38:21,893 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1560003292_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 02:38:21,902 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 02:38:21,907 INFO org.apache.hadoop.mapred.Task: Task attempt_local1560003292_0001_r_000000_0 is allowed to commit now -2017-02-18 02:38:21,909 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1560003292_0001_r_000000_0' to file:/home/cloudera/workspace/Assign1/output/_temporary/0/task_local1560003292_0001_r_000000 -2017-02-18 02:38:21,915 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 02:38:21,917 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1560003292_0001_r_000000_0' done. -2017-02-18 02:38:21,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1560003292_0001_r_000000_0 -2017-02-18 02:38:21,919 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 02:38:21,971 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1560003292_0001 -java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573) -Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest - at java.net.URLClassLoader$1.run(URLClassLoader.java:366) - at java.net.URLClassLoader$1.run(URLClassLoader.java:355) - at java.security.AccessController.doPrivileged(Native Method) - at java.net.URLClassLoader.findClass(URLClassLoader.java:354) - at java.lang.ClassLoader.loadClass(ClassLoader.java:425) - at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) - at java.lang.ClassLoader.loadClass(ClassLoader.java:358) - ... 
1 more -2017-02-18 02:38:22,259 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 02:38:22,260 INFO org.apache.hadoop.mapreduce.Job: Job job_local1560003292_0001 failed with state FAILED due to: NA -2017-02-18 02:38:22,337 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 - File System Counters - FILE: Number of bytes read=33617226 - FILE: Number of bytes written=34935992 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=124787 - Map output records=1023444 - Map output bytes=9171648 - Map output materialized bytes=11218542 - Input split bytes=112 - Combine input records=0 - Combine output records=0 - Reduce input groups=67780 - Reduce shuffle bytes=11218542 - Reduce input records=1023444 - Reduce output records=67780 - Spilled Records=2046888 - Shuffled Maps =1 - Failed Shuffles=0 - Merged Map outputs=1 - GC time elapsed (ms)=338 - Total committed heap usage (bytes)=331227136 - Shuffle Errors - BAD_ID=0 - CONNECTION=0 - IO_ERROR=0 - WRONG_LENGTH=0 - WRONG_MAP=0 - WRONG_REDUCE=0 - File Input Format Counters - Bytes Read=5589886 - File Output Format Counters - Bytes Written=726624 -2017-02-18 04:02:41,586 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 04:02:43,970 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 04:02:44,026 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 04:02:46,105 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
-2017-02-18 04:02:46,164 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 -2017-02-18 04:02:46,530 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 -2017-02-18 04:02:47,436 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2033431332_0001 -2017-02-18 04:02:49,019 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 04:02:49,020 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2033431332_0001 -2017-02-18 04:02:49,030 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 04:02:49,072 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:02:49,099 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 04:02:49,385 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 04:02:49,388 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033431332_0001_m_000000_0 -2017-02-18 04:02:49,620 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:02:49,757 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 04:02:49,775 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589886 -2017-02-18 04:02:50,212 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033431332_0001 running in uber mode : false -2017-02-18 04:02:50,219 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 04:02:50,512 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 04:02:50,531 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 04:02:55,692 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 04:02:56,253 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% -2017-02-18 04:02:56,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 04:02:56,274 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 04:02:56,276 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 04:02:56,276 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 -2017-02-18 04:02:56,277 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 -2017-02-18 04:02:58,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:02:59,264 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 04:03:01,728 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:03:04,732 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:03:05,609 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 04:03:05,656 INFO org.apache.hadoop.mapred.Task: 
Task:attempt_local2033431332_0001_m_000000_0 is done. And is in the process of committing -2017-02-18 04:03:05,670 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 04:03:05,676 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033431332_0001_m_000000_0' done. -2017-02-18 04:03:05,678 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033431332_0001_m_000000_0 -2017-02-18 04:03:05,679 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. -2017-02-18 04:03:05,699 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 04:03:05,700 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033431332_0001_r_000000_0 -2017-02-18 04:03:05,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:03:05,763 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 04:03:05,766 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7075f914 -2017-02-18 04:03:05,868 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 04:03:05,897 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033431332_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 04:03:06,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2033431332_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY -2017-02-18 04:03:06,226 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local2033431332_0001_m_000000_0 -2017-02-18 04:03:06,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 -2017-02-18 04:03:06,255 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 04:03:06,256 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 04:03:06,257 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 04:03:06,313 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 04:03:06,330 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 04:03:06,334 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 04:03:09,673 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit -2017-02-18 04:03:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk -2017-02-18 04:03:09,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 04:03:09,676 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 04:03:09,677 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 04:03:09,689 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 04:03:09,725 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords -2017-02-18 04:03:11,785 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:03:12,342 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 74% -2017-02-18 04:03:14,375 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033431332_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 04:03:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:03:14,388 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033431332_0001_r_000000_0 is allowed to commit now -2017-02-18 04:03:14,390 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033431332_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local2033431332_0001_r_000000 -2017-02-18 04:03:14,396 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:03:14,398 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033431332_0001_r_000000_0' done. -2017-02-18 04:03:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033431332_0001_r_000000_0 -2017-02-18 04:03:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 04:03:14,461 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2033431332_0001 -java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573) -Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest - at java.net.URLClassLoader$1.run(URLClassLoader.java:366) - at java.net.URLClassLoader$1.run(URLClassLoader.java:355) - at java.security.AccessController.doPrivileged(Native Method) - at java.net.URLClassLoader.findClass(URLClassLoader.java:354) - at java.lang.ClassLoader.loadClass(ClassLoader.java:425) - at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) - at java.lang.ClassLoader.loadClass(ClassLoader.java:358) - ... 
1 more -2017-02-18 04:03:15,360 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 04:03:15,361 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033431332_0001 failed with state FAILED due to: NA -2017-02-18 04:03:15,411 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 - File System Counters - FILE: Number of bytes read=33617234 - FILE: Number of bytes written=34936048 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=124787 - Map output records=1023444 - Map output bytes=9171648 - Map output materialized bytes=11218542 - Input split bytes=116 - Combine input records=0 - Combine output records=0 - Reduce input groups=67780 - Reduce shuffle bytes=11218542 - Reduce input records=1023444 - Reduce output records=67780 - Spilled Records=2046888 - Shuffled Maps =1 - Failed Shuffles=0 - Merged Map outputs=1 - GC time elapsed (ms)=323 - Total committed heap usage (bytes)=331227136 - Shuffle Errors - BAD_ID=0 - CONNECTION=0 - IO_ERROR=0 - WRONG_LENGTH=0 - WRONG_MAP=0 - WRONG_REDUCE=0 - File Input Format Counters - Bytes Read=5589886 - File Output Format Counters - Bytes Written=726624 -2017-02-18 04:04:46,638 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 04:04:48,239 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 04:04:48,274 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 04:04:49,758 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
-2017-02-18 04:04:49,787 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 -2017-02-18 04:04:50,101 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 -2017-02-18 04:04:50,998 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1076588983_0001 -2017-02-18 04:04:52,443 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 04:04:52,444 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1076588983_0001 -2017-02-18 04:04:52,456 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 04:04:52,491 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:04:52,520 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 04:04:52,807 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 04:04:52,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1076588983_0001_m_000000_0 -2017-02-18 04:04:52,978 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:04:53,055 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 04:04:53,060 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 -2017-02-18 04:04:53,448 INFO org.apache.hadoop.mapreduce.Job: Job job_local1076588983_0001 running in uber mode : false -2017-02-18 04:04:53,450 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 04:04:53,697 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 04:04:53,717 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 04:04:53,718 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 04:04:53,719 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 04:04:53,719 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 04:04:53,737 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 04:04:53,759 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 04:04:59,032 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 04:04:59,495 INFO org.apache.hadoop.mapreduce.Job: map 54% reduce 0% -2017-02-18 04:04:59,714 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 04:04:59,718 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 04:04:59,718 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 04:04:59,719 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 -2017-02-18 04:04:59,719 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 -2017-02-18 04:05:02,040 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:05:02,507 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 04:05:05,048 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:05:08,050 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 04:05:09,429 INFO org.apache.hadoop.mapred.MapTask: 
Finished spill 0 -2017-02-18 04:05:09,462 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1076588983_0001_m_000000_0 is done. And is in the process of committing -2017-02-18 04:05:09,468 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 04:05:09,471 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1076588983_0001_m_000000_0' done. -2017-02-18 04:05:09,472 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1076588983_0001_m_000000_0 -2017-02-18 04:05:09,473 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. -2017-02-18 04:05:09,492 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 04:05:09,492 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1076588983_0001_r_000000_0 -2017-02-18 04:05:09,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 04:05:09,542 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 04:05:09,547 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@113db302 -2017-02-18 04:05:09,550 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 04:05:09,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 04:05:09,670 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1076588983_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 04:05:09,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1076588983_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY -2017-02-18 04:05:09,965 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local1076588983_0001_m_000000_0 -2017-02-18 04:05:09,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 -2017-02-18 04:05:10,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 04:05:10,005 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 04:05:10,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 04:05:10,244 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 04:05:10,251 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 04:05:13,750 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit -2017-02-18 04:05:13,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk -2017-02-18 04:05:13,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 04:05:13,763 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 04:05:13,764 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes -2017-02-18 04:05:13,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 04:05:13,825 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords -2017-02-18 04:05:15,562 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:05:15,597 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 69% -2017-02-18 04:05:18,569 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:05:18,608 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 98% -2017-02-18 04:05:18,772 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1076588983_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 04:05:18,780 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:05:18,784 INFO org.apache.hadoop.mapred.Task: Task attempt_local1076588983_0001_r_000000_0 is allowed to commit now -2017-02-18 04:05:18,785 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1076588983_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1076588983_0001_r_000000 -2017-02-18 04:05:18,790 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 04:05:18,792 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1076588983_0001_r_000000_0' done. -2017-02-18 04:05:18,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1076588983_0001_r_000000_0 -2017-02-18 04:05:18,794 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 04:05:18,863 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1076588983_0001 -java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573) -Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest - at java.net.URLClassLoader$1.run(URLClassLoader.java:366) - at java.net.URLClassLoader$1.run(URLClassLoader.java:355) - at java.security.AccessController.doPrivileged(Native Method) - at java.net.URLClassLoader.findClass(URLClassLoader.java:354) - at java.lang.ClassLoader.loadClass(ClassLoader.java:425) - at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) - at java.lang.ClassLoader.loadClass(ClassLoader.java:358) - ... 
1 more -2017-02-18 04:05:19,616 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 04:05:19,617 INFO org.apache.hadoop.mapreduce.Job: Job job_local1076588983_0001 failed with state FAILED due to: NA -2017-02-18 04:05:19,660 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 - File System Counters - FILE: Number of bytes read=33617240 - FILE: Number of bytes written=34936048 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=124787 - Map output records=1023444 - Map output bytes=9171648 - Map output materialized bytes=11218542 - Input split bytes=116 - Combine input records=0 - Combine output records=0 - Reduce input groups=67780 - Reduce shuffle bytes=11218542 - Reduce input records=1023444 - Reduce output records=67780 - Spilled Records=2046888 - Shuffled Maps =1 - Failed Shuffles=0 - Merged Map outputs=1 - GC time elapsed (ms)=272 - Total committed heap usage (bytes)=331227136 - Shuffle Errors - BAD_ID=0 - CONNECTION=0 - IO_ERROR=0 - WRONG_LENGTH=0 - WRONG_MAP=0 - WRONG_REDUCE=0 - File Input Format Counters - Bytes Read=5589889 - File Output Format Counters - Bytes Written=726624 -2017-02-18 06:19:24,805 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 06:20:24,453 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 06:20:27,186 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 06:20:27,188 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 06:20:29,525 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
-2017-02-18 06:20:29,602 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 -2017-02-18 06:20:30,050 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 -2017-02-18 06:20:31,621 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local224597268_0001 -2017-02-18 06:20:33,564 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 06:20:33,566 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local224597268_0001 -2017-02-18 06:20:33,595 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 06:20:33,663 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:20:33,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 06:20:34,258 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 06:20:34,261 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_m_000000_0 -2017-02-18 06:20:34,553 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:20:34,596 INFO org.apache.hadoop.mapreduce.Job: Job job_local224597268_0001 running in uber mode : false -2017-02-18 06:20:34,604 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 06:20:34,725 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:20:34,763 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 -2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 06:20:37,502 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 06:20:37,565 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 06:20:43,656 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 06:20:44,652 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% -2017-02-18 06:20:45,757 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 06:20:45,762 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 06:20:45,765 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 06:20:45,766 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 -2017-02-18 06:20:45,767 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 -2017-02-18 06:20:46,673 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 06:20:47,658 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 06:20:49,678 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 06:20:52,682 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 06:20:55,683 INFO 
org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 06:20:57,582 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 06:20:57,630 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_m_000000_0 is done. And is in the process of committing -2017-02-18 06:20:57,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 06:20:57,643 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_m_000000_0' done. -2017-02-18 06:20:57,645 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_m_000000_0 -2017-02-18 06:20:57,646 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. -2017-02-18 06:20:57,681 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 06:20:57,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 06:20:57,772 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000000_0 -2017-02-18 06:20:57,834 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:20:57,835 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:20:57,875 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430 -2017-02-18 06:20:58,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 06:20:58,054 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 06:20:58,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY -2017-02-18 06:20:58,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local224597268_0001_m_000000_0 -2017-02-18 06:20:58,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747 -2017-02-18 06:20:58,429 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 06:20:58,431 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 06:20:58,432 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 06:20:58,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:20:58,520 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes -2017-02-18 06:20:59,614 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit -2017-02-18 06:20:59,616 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk -2017-02-18 06:20:59,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 06:20:59,629 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:20:59,632 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes -2017-02-18 06:20:59,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:20:59,699 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords -2017-02-18 06:21:01,420 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 06:21:01,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:01,443 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000000_0 is allowed to commit now -2017-02-18 06:21:01,444 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000000 -2017-02-18 06:21:01,458 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 06:21:01,459 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000000_0' done. 
-2017-02-18 06:21:01,459 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000000_0 -2017-02-18 06:21:01,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000001_0 -2017-02-18 06:21:01,476 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:21:01,477 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:21:01,490 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1798ca83 -2017-02-18 06:21:01,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 06:21:01,518 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 06:21:01,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY -2017-02-18 06:21:01,604 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local224597268_0001_m_000000_0 -2017-02-18 06:21:01,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898 -2017-02-18 06:21:01,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 06:21:01,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:01,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 06:21:01,608 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:01,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes -2017-02-18 06:21:01,686 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% -2017-02-18 06:21:02,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit -2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk -2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes -2017-02-18 06:21:02,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:03,660 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000001_0 is done. And is in the process of committing -2017-02-18 06:21:03,694 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 06:21:03,696 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000001_0 is allowed to commit now -2017-02-18 06:21:03,702 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000001 -2017-02-18 06:21:03,708 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 06:21:03,714 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000001_0' done. -2017-02-18 06:21:03,715 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000001_0 -2017-02-18 06:21:03,716 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000002_0 -2017-02-18 06:21:03,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:21:03,728 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:21:03,729 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@29d1cb0d -2017-02-18 06:21:03,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 06:21:03,757 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 06:21:03,776 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY -2017-02-18 06:21:03,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local224597268_0001_m_000000_0 -2017-02-18 06:21:03,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435 -2017-02-18 06:21:03,814 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 06:21:03,815 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 06:21:03,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 06:21:03,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:03,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes -2017-02-18 06:21:04,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit -2017-02-18 06:21:04,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk -2017-02-18 06:21:04,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 06:21:04,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:04,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes -2017-02-18 06:21:04,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:04,693 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20% -2017-02-18 06:21:04,762 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000002_0 is done. And is in the process of committing -2017-02-18 06:21:04,780 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:04,785 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000002_0 is allowed to commit now -2017-02-18 06:21:04,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000002 -2017-02-18 06:21:04,798 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 06:21:04,807 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000002_0' done. 
-2017-02-18 06:21:04,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000002_0 -2017-02-18 06:21:04,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000003_0 -2017-02-18 06:21:04,822 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:21:04,823 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:21:04,824 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@581b236 -2017-02-18 06:21:04,838 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 06:21:04,857 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 06:21:04,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY -2017-02-18 06:21:04,898 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local224597268_0001_m_000000_0 -2017-02-18 06:21:04,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896 -2017-02-18 06:21:04,904 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 06:21:04,905 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:04,906 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 06:21:04,908 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:04,909 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes -2017-02-18 06:21:05,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit -2017-02-18 06:21:05,232 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk -2017-02-18 06:21:05,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 06:21:05,235 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 06:21:05,236 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes -2017-02-18 06:21:05,237 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 06:21:05,589 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000003_0 is done. And is in the process of committing -2017-02-18 06:21:05,597 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 06:21:05,602 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000003_0 is allowed to commit now -2017-02-18 06:21:05,608 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000003 -2017-02-18 06:21:05,619 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 06:21:05,620 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000003_0' done. -2017-02-18 06:21:05,621 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000003_0 -2017-02-18 06:21:05,622 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000004_0 -2017-02-18 06:21:05,634 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 06:21:05,635 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 06:21:05,635 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@731e0de -2017-02-18 06:21:05,659 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 06:21:05,672 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 06:21:05,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY -2017-02-18 06:21:05,695 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 06:21:05,725 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local224597268_0001_m_000000_0 -2017-02-18 06:21:05,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904 -2017-02-18 06:21:05,738 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 06:21:05,744 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 06:21:05,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:05,745 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:05,746 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:21:06,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:06,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk
-2017-02-18 06:21:06,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:06,179 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:06,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:21:06,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:06,683 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 06:21:06,699 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:06,702 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 06:21:06,712 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000004_0 is allowed to commit now
-2017-02-18 06:21:06,716 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000004
-2017-02-18 06:21:06,720 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:06,725 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000004_0' done.
-2017-02-18 06:21:06,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000004_0
-2017-02-18 06:21:06,727 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000005_0
-2017-02-18 06:21:06,739 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:21:06,747 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:21:06,760 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a83e27
-2017-02-18 06:21:06,774 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:21:06,790 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:21:06,799 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY
-2017-02-18 06:21:06,851 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local224597268_0001_m_000000_0
-2017-02-18 06:21:06,851 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
-2017-02-18 06:21:06,852 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:21:06,853 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:06,853 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:06,855 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:06,855 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:21:07,146 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:07,148 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk
-2017-02-18 06:21:07,150 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:07,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:07,153 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:21:07,157 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:07,479 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 06:21:07,497 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:07,510 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000005_0 is allowed to commit now
-2017-02-18 06:21:07,513 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000005
-2017-02-18 06:21:07,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:07,524 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000005_0' done.
-2017-02-18 06:21:07,525 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000005_0
-2017-02-18 06:21:07,526 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000006_0
-2017-02-18 06:21:07,543 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:21:07,544 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:21:07,545 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cb9c5d
-2017-02-18 06:21:07,561 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:21:07,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:21:07,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY
-2017-02-18 06:21:07,656 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local224597268_0001_m_000000_0
-2017-02-18 06:21:07,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
-2017-02-18 06:21:07,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:21:07,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:07,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:07,659 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:07,659 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:21:07,713 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 06:21:08,052 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:08,059 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk
-2017-02-18 06:21:08,061 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:08,062 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:08,062 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:21:08,064 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:08,497 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 06:21:08,501 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:08,508 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000006_0 is allowed to commit now
-2017-02-18 06:21:08,511 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000006
-2017-02-18 06:21:08,519 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:08,522 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000006_0' done.
-2017-02-18 06:21:08,528 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000006_0
-2017-02-18 06:21:08,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000007_0
-2017-02-18 06:21:08,536 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:21:08,537 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:21:08,543 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5bfefcd4
-2017-02-18 06:21:08,560 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:21:08,572 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:21:08,587 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY
-2017-02-18 06:21:08,598 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local224597268_0001_m_000000_0
-2017-02-18 06:21:08,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
-2017-02-18 06:21:08,621 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:21:08,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:08,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:08,624 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:08,626 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:21:08,714 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
-2017-02-18 06:21:08,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:08,914 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk
-2017-02-18 06:21:08,916 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:08,916 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:08,917 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:21:08,919 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:09,223 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 06:21:09,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:09,249 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000007_0 is allowed to commit now
-2017-02-18 06:21:09,251 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000007
-2017-02-18 06:21:09,259 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:09,266 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000007_0' done.
-2017-02-18 06:21:09,267 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000007_0
-2017-02-18 06:21:09,267 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000008_0
-2017-02-18 06:21:09,277 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:21:09,278 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:21:09,278 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66a62053
-2017-02-18 06:21:09,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:21:09,313 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:21:09,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY
-2017-02-18 06:21:09,348 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local224597268_0001_m_000000_0
-2017-02-18 06:21:09,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
-2017-02-18 06:21:09,372 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:21:09,373 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:09,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:09,376 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:09,376 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:21:09,715 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 06:21:09,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:09,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk
-2017-02-18 06:21:09,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:09,886 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:09,888 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:21:09,890 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:10,440 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 06:21:10,457 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:10,466 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000008_0 is allowed to commit now
-2017-02-18 06:21:10,468 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000008
-2017-02-18 06:21:10,474 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:10,486 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000008_0' done.
-2017-02-18 06:21:10,487 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000008_0
-2017-02-18 06:21:10,488 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000009_0
-2017-02-18 06:21:10,501 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:21:10,502 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:21:10,511 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41eab4e0
-2017-02-18 06:21:10,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:21:10,538 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:21:10,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY
-2017-02-18 06:21:10,603 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local224597268_0001_m_000000_0
-2017-02-18 06:21:10,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
-2017-02-18 06:21:10,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:21:10,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:10,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:21:10,608 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:10,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:21:10,716 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 06:21:10,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:21:10,918 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk
-2017-02-18 06:21:10,919 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:21:10,919 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:21:10,920 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:21:10,926 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:11,260 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 06:21:11,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:21:11,282 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000009_0 is allowed to commit now
-2017-02-18 06:21:11,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000009
-2017-02-18 06:21:11,300 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:21:11,303 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000009_0' done.
-2017-02-18 06:21:11,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000009_0
-2017-02-18 06:21:11,304 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 06:21:11,475 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local224597268_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-  at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-  at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-  at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-  at java.security.AccessController.doPrivileged(Native Method)
-  at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-  at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-  at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-  at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-  ... 1 more
-2017-02-18 06:21:11,720 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:21:11,721 INFO org.apache.hadoop.mapreduce.Job: Job job_local224597268_0001 failed with state FAILED due to: NA
-2017-02-18 06:21:11,958 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-  File System Counters
-    FILE: Number of bytes read=192567978
-    FILE: Number of bytes written=191860141
-    FILE: Number of read operations=0
-    FILE: Number of large read operations=0
-    FILE: Number of write operations=0
-  Map-Reduce Framework
-    Map input records=124787
-    Map output records=1023444
-    Map output bytes=9171648
-    Map output materialized bytes=11218596
-    Input split bytes=116
-    Combine input records=0
-    Combine output records=0
-    Reduce input groups=67780
-    Reduce shuffle bytes=11218596
-    Reduce input records=1023444
-    Reduce output records=27
-    Spilled Records=2046888
-    Shuffled Maps =10
-    Failed Shuffles=0
-    Merged Map outputs=10
-    GC time elapsed (ms)=337
-    Total committed heap usage (bytes)=1821749248
-  Shuffle Errors
-    BAD_ID=0
-    CONNECTION=0
-    IO_ERROR=0
-    WRONG_LENGTH=0
-    WRONG_MAP=0
-    WRONG_REDUCE=0
-  File Input Format Counters
-    Bytes Read=5589889
-  File Output Format Counters
-    Bytes Written=358
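The failed run above shows two separable problems. The root cause of the FAILED state is the ClassNotFoundException for org.apache.http.client.methods.HttpUriRequest: the Apache HttpClient jar was not on the runtime classpath, so the usual fix is to add it (for example via the HADOOP_CLASSPATH environment variable, or by bundling it into the job jar). Independently, the "No job jar file set" warning logged at submission means the driver never registered a jar, which the log itself points at with "See Job or Job#setJar(String)". A minimal driver sketch in that spirit follows; the class names StubDriver and StubMapper and the word-count mapper body are illustrative assumptions, not the assignment's actual code:

    // Sketch only: registers the job jar so user classes can be resolved at runtime.
    import java.io.IOException;
    import java.util.StringTokenizer;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class StubDriver {

      // Hypothetical mapper: emits (word, 1) per token, as in a plain word count.
      public static class StubMapper extends Mapper<Object, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
          StringTokenizer tokens = new StringTokenizer(value.toString());
          while (tokens.hasMoreTokens()) {
            word.set(tokens.nextToken());
            context.write(word, ONE);
          }
        }
      }

      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "Assign1");
        // Registering the jar containing this class avoids the "No job jar file
        // set" warning; Job#setJar(String) takes an explicit jar path instead.
        job.setJarByClass(StubDriver.class);
        job.setMapperClass(StubMapper.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));   // e.g. pg100.txt
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // must not exist yet
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }

Packaged as, say, Assign1.jar, this would run as "hadoop jar Assign1.jar StubDriver pg100.txt output", with the HttpClient jar added to HADOOP_CLASSPATH if it is still missing from the platform classpath.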
-2017-02-18 06:32:37,775 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 06:32:40,633 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 06:32:40,989 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 06:32:41,025 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 06:32:43,943 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 06:32:43,999 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 06:32:44,121 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 06:32:44,304 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-02-18 06:32:45,122 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-02-18 06:32:46,719 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 06:32:46,835 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
-2017-02-18 06:32:47,295 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local961145712_0001
-2017-02-18 06:32:47,386 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
-2017-02-18 06:32:49,204 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2033279662_0001
-2017-02-18 06:32:49,880 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 06:32:49,882 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local961145712_0001
-2017-02-18 06:32:49,907 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 06:32:50,028 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:32:50,055 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 06:32:50,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 06:32:50,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_m_000000_0
-2017-02-18 06:32:50,887 INFO org.apache.hadoop.mapreduce.Job: Job job_local961145712_0001 running in uber mode : false
-2017-02-18 06:32:50,943 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 06:32:51,189 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:32:51,395 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:32:51,413 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 06:32:51,983 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 06:32:51,985 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2033279662_0001
-2017-02-18 06:32:52,029 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 06:32:52,095 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:32:52,107 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 06:32:53,025 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033279662_0001 running in uber mode : false
-2017-02-18 06:32:53,566 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 06:32:54,509 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 06:32:54,532 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:32:55,134 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:32:55,155 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:32:55,210 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:32:55,544 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:32:55,692 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:32:55,709 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:32:55,807 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 06:32:59,289 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:32:59,352 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:32:59,510 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:33:00,946 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:01,174 INFO org.apache.hadoop.mapreduce.Job: map 2% reduce 0%
-2017-02-18 06:33:03,950 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:04,184 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
-2017-02-18 06:33:04,335 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:05,264 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
-2017-02-18 06:33:06,969 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:07,196 INFO org.apache.hadoop.mapreduce.Job: map 53% reduce 0%
-2017-02-18 06:33:07,339 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:08,031 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 06:33:08,273 INFO org.apache.hadoop.mapreduce.Job: map 32% reduce 0%
-2017-02-18 06:33:09,913 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 06:33:09,971 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:10,206 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 06:33:10,340 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:11,286 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 06:33:12,972 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:13,342 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:15,973 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:16,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:18,981 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:19,347 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:21,984 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:22,348 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:33:22,816 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:33:22,846 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 06:33:22,859 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_m_000000_0' done.
-2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 06:33:23,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 06:33:23,019 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000000_0
-2017-02-18 06:33:23,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:23,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:23,158 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@44f9c0c
-2017-02-18 06:33:23,244 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:33:23,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:23,391 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:23,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY
-2017-02-18 06:33:23,798 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:23,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747
-2017-02-18 06:33:23,854 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:23,855 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:23,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:23,917 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:23,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes
-2017-02-18 06:33:25,206 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:33:25,255 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 06:33:25,258 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:33:25,273 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_m_000000_0' done.
-2017-02-18 06:33:25,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:25,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 06:33:25,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:25,294 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk
-2017-02-18 06:33:25,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:25,296 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:25,296 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes
-2017-02-18 06:33:25,319 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:33:25,326 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:25,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 06:33:25,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000000_0
-2017-02-18 06:33:25,433 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 06:33:25,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:25,524 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:25,548 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1e60a696
-2017-02-18 06:33:25,733 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:25,782 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:26,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY
-2017-02-18 06:33:26,269 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:26,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747
-2017-02-18 06:33:26,317 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:26,318 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:26,325 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:26,376 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:26,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes
-2017-02-18 06:33:27,956 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 06:33:27,995 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:27,995 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000000_0 is allowed to commit now
-2017-02-18 06:33:27,996 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000000
-2017-02-18 06:33:28,010 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000000_0' done.
-2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000000_0
-2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000001_0
-2017-02-18 06:33:28,049 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:28,050 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:28,050 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1eb6a3c
-2017-02-18 06:33:28,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:28,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:28,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:28,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk
-2017-02-18 06:33:28,101 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:28,101 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:28,102 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes
-2017-02-18 06:33:28,102 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:28,092 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY
-2017-02-18 06:33:28,141 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:28,162 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898
-2017-02-18 06:33:28,163 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:28,164 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:28,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:28,165 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:28,165 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes
-2017-02-18 06:33:28,195 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 06:33:28,254 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 06:33:29,638 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk
-2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes
-2017-02-18 06:33:29,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:30,595 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 06:33:30,639 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:30,639 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000000_0 is allowed to commit now
-2017-02-18 06:33:30,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000000
-2017-02-18 06:33:30,658 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000000_0' done.
-2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000000_0
-2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000001_0
-2017-02-18 06:33:30,692 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:30,694 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:30,694 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@30eab853
-2017-02-18 06:33:30,711 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:30,755 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:30,770 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY
-2017-02-18 06:33:30,796 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:30,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898
-2017-02-18 06:33:30,861 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:30,862 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:30,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:30,864 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:30,864 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes
-2017-02-18 06:33:31,350 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 06:33:31,388 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 06:33:31,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:31,399 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000001_0 is allowed to commit now
-2017-02-18 06:33:31,423 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000001
-2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000001_0' done.
-2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000001_0
-2017-02-18 06:33:31,433 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000002_0
-2017-02-18 06:33:31,451 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:31,452 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:31,452 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2deeef8c
-2017-02-18 06:33:31,469 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:31,490 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:31,504 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY
-2017-02-18 06:33:31,527 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:31,552 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
-2017-02-18 06:33:31,552 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:31,553 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:31,554 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:31,555 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:31,555 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
-2017-02-18 06:33:32,112 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:32,113 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk
-2017-02-18 06:33:32,121 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:32,122 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:32,122 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
-2017-02-18 06:33:32,123 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:32,265 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 06:33:32,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:32,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk
-2017-02-18 06:33:32,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:32,289 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:32,289 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes
-2017-02-18 06:33:32,290 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:32,889 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 06:33:32,890 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:32,891 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000002_0 is allowed to commit now
-2017-02-18 06:33:32,892 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000002
-2017-02-18 06:33:32,934 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000002_0' done.
-2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000002_0
-2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000003_0
-2017-02-18 06:33:32,968 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:32,970 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:32,983 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@11a9281f
-2017-02-18 06:33:33,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:33,035 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:33,097 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY
-2017-02-18 06:33:33,114 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:33,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
-2017-02-18 06:33:33,128 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:33,130 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:33,130 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:33,131 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:33,131 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
-2017-02-18 06:33:33,268 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 06:33:33,590 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:33,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk
-2017-02-18 06:33:33,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:33,603 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:33,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
-2017-02-18 06:33:33,604 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:33,864 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 06:33:33,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:33,889 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000001_0 is allowed to commit now
-2017-02-18 06:33:33,891 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000001
-2017-02-18 06:33:33,902 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:33,902 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000001_0' done.
-2017-02-18 06:33:33,903 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000001_0
-2017-02-18 06:33:33,903 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000002_0
-2017-02-18 06:33:33,932 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:33,933 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:33,933 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7b4fb4eb
-2017-02-18 06:33:33,944 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:33,968 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:33,994 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY
-2017-02-18 06:33:34,020 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:34,035 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
-2017-02-18 06:33:34,036 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:34,038 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:34,039 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:34,040 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:34,040 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
-2017-02-18 06:33:34,122 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 06:33:34,137 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:34,138 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000003_0 is allowed to commit now
-2017-02-18 06:33:34,139 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000003
-2017-02-18 06:33:34,162 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:34,163 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000003_0' done.
-2017-02-18 06:33:34,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000003_0
-2017-02-18 06:33:34,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000004_0
-2017-02-18 06:33:34,186 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:34,187 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:34,197 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@15f3ca88
-2017-02-18 06:33:34,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:34,236 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:34,244 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY
-2017-02-18 06:33:34,269 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:33:34,292 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:34,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
-2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:34,296 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:34,296 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:33:34,361 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 06:33:34,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk
-2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:34,689 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
-2017-02-18 06:33:34,690 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:34,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk
-2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:34,913 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:33:34,913 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:35,271 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 06:33:35,386 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 06:33:35,388 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:35,413 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000002_0 is allowed to commit now
-2017-02-18 06:33:35,415 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000002
-2017-02-18 06:33:35,416 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000002_0' done.
-2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000002_0
-2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000003_0
-2017-02-18 06:33:35,460 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:35,462 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:35,462 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17fdb5c7
-2017-02-18 06:33:35,473 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:35,496 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:35,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY
-2017-02-18 06:33:35,552 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:35,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
-2017-02-18 06:33:35,577 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:35,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:35,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:35,593 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:35,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
-2017-02-18 06:33:35,693 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 06:33:35,708 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:35,724 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000004_0 is allowed to commit now
-2017-02-18 06:33:35,751 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000004
-2017-02-18 06:33:35,753 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:35,767 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000004_0' done.
-2017-02-18 06:33:35,768 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000004_0
-2017-02-18 06:33:35,768 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000005_0
-2017-02-18 06:33:35,784 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:35,785 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:35,804 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3aa101a7
-2017-02-18 06:33:35,821 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:35,846 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:35,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY
-2017-02-18 06:33:35,879 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:35,901 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
-2017-02-18 06:33:35,903 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:35,904 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:35,904 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:35,905 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:35,905 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:33:36,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:36,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk
-2017-02-18 06:33:36,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:36,091 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:36,105 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
-2017-02-18 06:33:36,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,272 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 06:33:36,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk
-2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:33:36,364 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 06:33:36,375 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,566 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 06:33:36,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,622 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000003_0 is allowed to commit now
-2017-02-18 06:33:36,623 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000003
-2017-02-18 06:33:36,636 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:36,637 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000003_0' done.
-2017-02-18 06:33:36,637 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000003_0
-2017-02-18 06:33:36,638 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000004_0
-2017-02-18 06:33:36,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:36,653 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:36,654 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@207e7fdb
-2017-02-18 06:33:36,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:36,693 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:36,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY
-2017-02-18 06:33:36,734 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:36,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
-2017-02-18 06:33:36,735 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:36,736 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:36,739 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:36,739 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:33:36,793 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 06:33:36,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,819 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000005_0 is allowed to commit now
-2017-02-18 06:33:36,821 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000005
-2017-02-18 06:33:36,838 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000005_0' done.
-2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000005_0
-2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000006_0
-2017-02-18 06:33:36,861 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:36,862 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:36,862 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41d04f3b
-2017-02-18 06:33:36,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:36,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:36,931 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY
-2017-02-18 06:33:36,959 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:36,977 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
-2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:36,980 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:36,980 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:33:37,273 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 06:33:37,364 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 06:33:37,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:37,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk
-2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
-2017-02-18 06:33:37,374 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:37,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk
-2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:33:37,568 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,122 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 06:33:38,145 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 06:33:38,147 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,147 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000004_0 is allowed to commit now
-2017-02-18 06:33:38,149 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000004
-2017-02-18 06:33:38,154 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,154 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000006_0 is allowed to commit now
-2017-02-18 06:33:38,155 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000006
-2017-02-18 06:33:38,157 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000004_0' done.
-2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000004_0
-2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000005_0
-2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000006_0' done.
-2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000006_0
-2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000007_0
-2017-02-18 06:33:38,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:38,182 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:38,182 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22d0ce11
-2017-02-18 06:33:38,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:38,184 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:38,202 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:38,203 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@57e13166
-2017-02-18 06:33:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:38,217 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:38,231 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:38,233 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY
-2017-02-18 06:33:38,260 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY
-2017-02-18 06:33:38,271 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:38,236 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:38,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
-2017-02-18 06:33:38,280 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:38,281 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:38,282 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:38,283 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:33:38,275 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:33:38,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
-2017-02-18 06:33:38,295 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:38,296 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:38,298 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:38,298 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:33:38,427 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 06:33:38,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:38,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk
-2017-02-18 06:33:38,732 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:38,732 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:38,733 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:33:38,733 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:38,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:38,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk
-2017-02-18 06:33:38,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:38,760 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:38,766 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
-2017-02-18 06:33:38,767 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:39,085 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 06:33:39,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:39,151 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000007_0 is allowed to commit now
-2017-02-18 06:33:39,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000007
-2017-02-18 06:33:39,154 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000007_0' done.
-2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000007_0
-2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000008_0
-2017-02-18 06:33:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:39,188 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:39,201 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33ed55b9
-2017-02-18 06:33:39,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 06:33:39,212 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:39,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:39,225 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000005_0 is allowed to commit now
-2017-02-18 06:33:39,243 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:39,243 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000005
-2017-02-18 06:33:39,245 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000005_0' done.
-2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000005_0
-2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000006_0
-2017-02-18 06:33:39,267 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY
-2017-02-18 06:33:39,271 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:39,281 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:39,282 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:39,282 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cda661a
-2017-02-18 06:33:39,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:39,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
-2017-02-18 06:33:39,317 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:39,318 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:39,319 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:39,320 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:39,320 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:33:39,331 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:39,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY
-2017-02-18 06:33:39,395 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:39,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
-2017-02-18 06:33:39,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:39,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:39,398 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:39,399 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:39,400 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:33:39,428 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 06:33:39,982 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk
-2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:39,987 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
-2017-02-18 06:33:39,996 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:40,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:40,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk
-2017-02-18 06:33:40,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:40,073 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:40,073 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:33:40,074 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:40,296 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 06:33:40,610 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 06:33:40,613 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:40,640 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000006_0 is allowed to commit now
-2017-02-18 06:33:40,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000006
-2017-02-18 06:33:40,642 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000006_0' done.
-2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000006_0
-2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000007_0
-2017-02-18 06:33:40,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:40,665 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:40,678 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fc514a7
-2017-02-18 06:33:40,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:40,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY
-2017-02-18 06:33:40,768 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:40,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
-2017-02-18 06:33:40,769 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:40,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:40,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:40,772 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:40,772 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:33:40,839 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 06:33:40,869 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:40,880 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000008_0 is allowed to commit now
-2017-02-18 06:33:40,881 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000008
-2017-02-18 06:33:40,893 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:40,893 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000008_0' done.
-2017-02-18 06:33:40,894 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000008_0
-2017-02-18 06:33:40,910 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000009_0
-2017-02-18 06:33:40,924 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:40,925 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:40,926 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@75096410
-2017-02-18 06:33:40,935 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:40,963 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:40,978 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY
-2017-02-18 06:33:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local961145712_0001_m_000000_0
-2017-02-18 06:33:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
-2017-02-18 06:33:41,022 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:41,023 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:41,023 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:41,025 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:41,025 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:33:41,177 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:41,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk
-2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
-2017-02-18 06:33:41,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:41,300 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 06:33:41,436 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
-2017-02-18 06:33:41,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk
-2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:33:41,464 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:41,642 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 06:33:41,688 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:41,689 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000007_0 is allowed to commit now
-2017-02-18 06:33:41,705 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000007
-2017-02-18 06:33:41,706 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000007_0' done.
-2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000007_0
-2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000008_0
-2017-02-18 06:33:41,732 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:41,733 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:41,757 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7888a8ee
-2017-02-18 06:33:41,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:41,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:41,817 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY
-2017-02-18 06:33:41,861 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:41,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
-2017-02-18 06:33:41,862 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:41,863 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:41,863 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:41,865 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:41,865 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:33:41,981 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 06:33:42,006 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:42,020 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000009_0 is allowed to commit now
-2017-02-18 06:33:42,021 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000009
-2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000009_0' done.
-2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000009_0
-2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 06:33:42,301 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:33:42,437 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 06:33:42,476 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local961145712_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
- at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
- at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
- at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
- at java.security.AccessController.doPrivileged(Native Method)
- at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
- at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
- at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
- at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
- ... 1 more
-2017-02-18 06:33:42,798 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk
-2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:42,800 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
-2017-02-18 06:33:42,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:43,303 INFO org.apache.hadoop.mapreduce.Job: Job job_local961145712_0001 failed with state FAILED due to: NA
-2017-02-18 06:33:43,633 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
- File System Counters
- FILE: Number of bytes read=192567978
- FILE: Number of bytes written=191859731
- FILE: Number of read operations=0
- FILE: Number of large read operations=0
- FILE: Number of write operations=0
- Map-Reduce Framework
- Map input records=124787
- Map output records=1023444
- Map output bytes=9171648
- Map output materialized bytes=11218596
- Input split bytes=116
- Combine input records=0
- Combine output records=0
- Reduce input groups=67780
- Reduce shuffle bytes=11218596
- Reduce input records=1023444
- Reduce output records=27
- Spilled Records=2046888
- Shuffled Maps =10
- Failed Shuffles=0
- Merged Map outputs=10
- GC time elapsed (ms)=543
- Total committed heap usage (bytes)=1821749248
- Shuffle Errors
- BAD_ID=0
- CONNECTION=0
- IO_ERROR=0
- WRONG_LENGTH=0
- WRONG_MAP=0
- WRONG_REDUCE=0
- File Input Format Counters
- Bytes Read=5589889
- File Output Format Counters
- Bytes Written=268
-2017-02-18 06:33:43,644 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 06:33:43,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:43,677 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000008_0 is allowed to commit now
-2017-02-18 06:33:43,681 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000008
-2017-02-18 06:33:43,691 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:43,701 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000008_0' done.
-2017-02-18 06:33:43,702 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000008_0
-2017-02-18 06:33:43,712 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000009_0
-2017-02-18 06:33:43,728 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:33:43,729 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:33:43,739 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1197c2dd
-2017-02-18 06:33:43,761 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:33:43,778 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:33:43,793 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY
-2017-02-18 06:33:43,820 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local2033279662_0001_m_000000_0
-2017-02-18 06:33:43,830 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
-2017-02-18 06:33:43,831 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:33:43,831 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:43,832 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:33:43,833 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:43,833 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:33:44,218 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:33:44,221 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk
-2017-02-18 06:33:44,225 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:33:44,227 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:33:44,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
-2017-02-18 06:33:44,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:44,443 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 06:33:44,573 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 06:33:44,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 06:33:44,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000009_0 is allowed to commit now
-2017-02-18 06:33:44,590 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000009
-2017-02-18 06:33:44,596 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:33:44,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000009_0' done.
-2017-02-18 06:33:44,602 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000009_0
-2017-02-18 06:33:44,602 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 06:33:44,704 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2033279662_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
- at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
- at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
- at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
- at java.security.AccessController.doPrivileged(Native Method)
- at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
- at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
- at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
- at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
- ... 1 more
-2017-02-18 06:33:45,444 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:33:45,445 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033279662_0001 failed with state FAILED due to: NA
-2017-02-18 06:33:45,698 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
- File System Counters
- FILE: Number of bytes read=192567978
- FILE: Number of bytes written=191875879
- FILE: Number of read operations=0
- FILE: Number of large read operations=0
- FILE: Number of write operations=0
- Map-Reduce Framework
- Map input records=124787
- Map output records=1023444
- Map output bytes=9171648
- Map output materialized bytes=11218596
- Input split bytes=116
- Combine input records=0
- Combine output records=0
- Reduce input groups=67780
- Reduce shuffle bytes=11218596
- Reduce input records=1023444
- Reduce output records=27
- Spilled Records=2046888
- Shuffled Maps =10
- Failed Shuffles=0
- Merged Map outputs=10
- GC time elapsed (ms)=495
- Total committed heap usage (bytes)=1821749248
- Shuffle Errors
- BAD_ID=0
- CONNECTION=0
- IO_ERROR=0
- WRONG_LENGTH=0
- WRONG_MAP=0
- WRONG_REDUCE=0
- File Input Format Counters
- Bytes Read=5589889
- File Output Format Counters
- Bytes Written=268
-2017-02-18 06:43:25,426 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 06:43:28,141 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 06:43:28,157 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 06:43:30,140 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 06:43:30,229 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 06:43:30,641 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 06:43:32,213 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local767302091_0001
-2017-02-18 06:43:34,192 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 06:43:34,193 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local767302091_0001
-2017-02-18 06:43:34,210 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 06:43:34,295 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:43:34,310 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 06:43:34,780 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 06:43:34,782 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000000_0
-2017-02-18 06:43:35,010 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:43:35,115 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:43:35,147 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 06:43:35,196 INFO org.apache.hadoop.mapreduce.Job: Job job_local767302091_0001 running in uber mode : false
-2017-02-18 06:43:35,198 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 06:43:35,853 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:43:35,866 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:43:35,867 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:43:35,868 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:43:35,868 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:43:35,890 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:43:35,944 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:43:41,093 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:43:41,228 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
-2017-02-18 06:43:44,095 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:43:44,240 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
-2017-02-18 06:43:47,108 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:43:47,246 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
-2017-02-18 06:43:50,111 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:43:50,254 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
-2017-02-18 06:43:50,504 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:43:50,511 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:43:50,512 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:43:50,513 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 06:43:50,513 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 06:43:53,112 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:43:53,262 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 06:43:56,113 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:43:59,115 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:02,117 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:05,121 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:08,125 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:11,129 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:14,132 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:24,792 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:44:24,818 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 06:44:24,830 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:44:24,837 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000000_0' done.
-2017-02-18 06:44:24,838 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000000_0
-2017-02-18 06:44:24,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000001_0
-2017-02-18 06:44:24,856 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:44:24,857 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:44:24,873 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 06:44:25,245 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:44:25,248 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:44:25,248 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:44:25,249 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:44:25,249 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:44:25,259 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:44:25,268 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:44:25,328 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:44:29,321 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 06:44:29,351 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 06:44:30,339 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 06:44:30,880 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:31,341 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 06:44:33,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:36,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:39,581 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:44:39,604 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 06:44:39,610 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:44:39,614 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000001_0' done.
-2017-02-18 06:44:39,615 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000001_0
-2017-02-18 06:44:39,616 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000002_0
-2017-02-18 06:44:39,624 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:44:39,625 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:44:39,626 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 06:44:40,121 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:44:40,137 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:44:40,140 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:44:40,141 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:44:40,141 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:44:40,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:44:40,151 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:44:40,363 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:44:43,697 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 06:44:44,371 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 06:44:45,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:46,373 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 06:44:48,638 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:51,640 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:44:52,111 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:44:52,125 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 06:44:52,134 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:44:52,144 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000002_0' done.
-2017-02-18 06:44:52,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000002_0
-2017-02-18 06:44:52,146 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 06:44:52,242 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 06:44:52,243 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000000_0
-2017-02-18 06:44:52,287 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:44:52,288 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:44:52,315 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f357760
-2017-02-18 06:44:52,388 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:44:52,421 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:44:52,466 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:44:52,702 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 991747 len: 991751 to MEMORY
-2017-02-18 06:44:52,748 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:44:52,773 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747
-2017-02-18 06:44:52,790 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2685875 len: 2685879 to MEMORY
-2017-02-18 06:44:52,827 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2685875 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:44:52,859 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2685875, inMemoryMapOutputs.size() -> 2, commitMemory -> 991747, usedMemory ->3677622
-2017-02-18 06:44:52,865 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 881571 len: 881575 to MEMORY
-2017-02-18 06:44:52,877 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 881571 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:44:52,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 881571, inMemoryMapOutputs.size() -> 3, commitMemory -> 3677622, usedMemory ->4559193
-2017-02-18 06:44:52,893 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:44:52,894 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:44:52,895 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:44:52,923 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:44:52,928 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4559151 bytes
-2017-02-18 06:44:55,329 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4559193 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:44:55,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4559193 bytes from disk
-2017-02-18 06:44:55,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:44:55,345 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:44:55,346 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4559175 bytes
-2017-02-18 06:44:55,351 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:44:55,403 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 06:44:58,216 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 06:44:58,249 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:44:58,257 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000000_0 is allowed to commit now
-2017-02-18 06:44:58,259 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000000
-2017-02-18 06:44:58,266 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:44:58,272 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000000_0' done.
-2017-02-18 06:44:58,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000000_0
-2017-02-18 06:44:58,274 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000001_0
-2017-02-18 06:44:58,287 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:44:58,288 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:44:58,289 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@390f65b9
-2017-02-18 06:44:58,309 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:44:58,328 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:44:58,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1980898 len: 1980902 to MEMORY
-2017-02-18 06:44:58,408 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:44:58,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:44:58,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898
-2017-02-18 06:44:58,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3630499 len: 3630503 to MEMORY
-2017-02-18 06:44:58,504 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3630499 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:44:58,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3630499, inMemoryMapOutputs.size() -> 2, commitMemory -> 1980898, usedMemory ->5611397
-2017-02-18 06:44:58,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 992056 len: 992060 to MEMORY
-2017-02-18 06:44:58,544 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 992056 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:44:58,547 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 992056, inMemoryMapOutputs.size() -> 3, commitMemory -> 5611397, usedMemory ->6603453
-2017-02-18 06:44:58,549 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:44:58,550 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:44:58,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:44:58,553 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:44:58,553 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6603444 bytes
-2017-02-18 06:44:59,409 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 06:45:01,414 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6603453 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6603453 bytes from disk
-2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6603446 bytes
-2017-02-18 06:45:01,424 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:04,301 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:04,421 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 06:45:04,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 06:45:04,770 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:04,771 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000001_0 is allowed to commit now
-2017-02-18 06:45:04,772 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000001
-2017-02-18 06:45:04,795 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:04,795 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000001_0' done.
-2017-02-18 06:45:04,796 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000001_0
-2017-02-18 06:45:04,796 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000002_0
-2017-02-18 06:45:04,821 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:04,823 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:04,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20739903
-2017-02-18 06:45:04,834 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:04,848 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:04,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1199435 len: 1199439 to MEMORY
-2017-02-18 06:45:04,894 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:04,928 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
-2017-02-18 06:45:04,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 4721951 len: 4721955 to MEMORY
-2017-02-18 06:45:05,041 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4721951 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:05,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4721951, inMemoryMapOutputs.size() -> 2, commitMemory -> 1199435, usedMemory ->5921386
-2017-02-18 06:45:05,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 1178681 len: 1178685 to MEMORY
-2017-02-18 06:45:05,079 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1178681 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:05,096 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1178681, inMemoryMapOutputs.size() -> 3, commitMemory -> 5921386, usedMemory ->7100067
-2017-02-18 06:45:05,098 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:05,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:05,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:05,102 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:05,103 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7100041 bytes
-2017-02-18 06:45:07,803 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7100067 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7100067 bytes from disk
-2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:07,811 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7100054 bytes
-2017-02-18 06:45:07,815 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:10,294 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 06:45:10,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:10,299 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000002_0 is allowed to commit now
-2017-02-18 06:45:10,299 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000002
-2017-02-18 06:45:10,312 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:10,318 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000002_0' done.
-2017-02-18 06:45:10,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000002_0
-2017-02-18 06:45:10,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000003_0
-2017-02-18 06:45:10,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:10,329 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:10,330 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b082505
-2017-02-18 06:45:10,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:10,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:10,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 914896 len: 914900 to MEMORY
-2017-02-18 06:45:10,396 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:10,417 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
-2017-02-18 06:45:10,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2586992 len: 2586996 to MEMORY
-2017-02-18 06:45:10,441 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:45:10,478 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2586992 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:10,481 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2586992, inMemoryMapOutputs.size() -> 2, commitMemory -> 914896, usedMemory ->3501888
-2017-02-18 06:45:10,487 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 750229 len: 750233 to MEMORY
-2017-02-18 06:45:10,523 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 750229 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:10,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 750229, inMemoryMapOutputs.size() -> 3, commitMemory -> 3501888, usedMemory ->4252117
-2017-02-18 06:45:10,524 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:10,525 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:10,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:10,527 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:10,527 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4252081 bytes
-2017-02-18 06:45:11,445 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 06:45:12,063 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4252117 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:12,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4252117 bytes from disk
-2017-02-18 06:45:12,067 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:12,067 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:12,069 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4252100 bytes
-2017-02-18 06:45:12,071 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:13,479 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 06:45:13,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:13,485 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000003_0 is allowed to commit now
-2017-02-18 06:45:13,486 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000003
-2017-02-18 06:45:13,501 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:13,504 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000003_0' done.
-2017-02-18 06:45:13,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000003_0
-2017-02-18 06:45:13,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000004_0
-2017-02-18 06:45:13,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:13,519 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:13,519 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7703d828
-2017-02-18 06:45:13,525 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:13,550 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:13,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1165904 len: 1165908 to MEMORY
-2017-02-18 06:45:13,576 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:13,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
-2017-02-18 06:45:13,605 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3465672 len: 3465676 to MEMORY
-2017-02-18 06:45:13,684 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3465672 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:13,689 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3465672, inMemoryMapOutputs.size() -> 2, commitMemory -> 1165904, usedMemory ->4631576
-2017-02-18 06:45:13,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 962343 len: 962347 to MEMORY
-2017-02-18 06:45:13,708 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 962343 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:13,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 962343, inMemoryMapOutputs.size() -> 3, commitMemory -> 4631576, usedMemory ->5593919
-2017-02-18 06:45:13,725 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:13,726 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:13,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:13,762 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:13,763 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5593898 bytes
-2017-02-18 06:45:14,453 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 06:45:15,937 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5593919 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5593919 bytes from disk
-2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5593911 bytes
-2017-02-18 06:45:15,947 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:18,033 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 06:45:18,042 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:18,042 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000004_0 is allowed to commit now
-2017-02-18 06:45:18,043 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000004
-2017-02-18 06:45:18,058 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:18,061 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000004_0' done.
-2017-02-18 06:45:18,062 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000004_0
-2017-02-18 06:45:18,063 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000005_0
-2017-02-18 06:45:18,076 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:18,077 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:18,077 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7adb5354
-2017-02-18 06:45:18,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:18,109 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:18,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 867472 len: 867476 to MEMORY
-2017-02-18 06:45:18,130 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:18,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
-2017-02-18 06:45:18,154 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2657280 len: 2657284 to MEMORY
-2017-02-18 06:45:18,208 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2657280 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:18,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2657280, inMemoryMapOutputs.size() -> 2, commitMemory -> 867472, usedMemory ->3524752
-2017-02-18 06:45:18,222 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 725836 len: 725840 to MEMORY
-2017-02-18 06:45:18,246 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 725836 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:18,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 725836, inMemoryMapOutputs.size() -> 3, commitMemory -> 3524752, usedMemory ->4250588
-2017-02-18 06:45:18,254 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:18,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:18,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:18,262 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:18,262 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4250568 bytes
-2017-02-18 06:45:18,464 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 06:45:19,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4250588 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:19,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4250588 bytes from disk
-2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4250579 bytes
-2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:21,241 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 06:45:21,245 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:21,245 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000005_0 is allowed to commit now
-2017-02-18 06:45:21,246 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000005
-2017-02-18 06:45:21,261 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:21,263 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000005_0' done.
-2017-02-18 06:45:21,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000005_0
-2017-02-18 06:45:21,265 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000006_0
-2017-02-18 06:45:21,273 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:21,274 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:21,274 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f3ed32a
-2017-02-18 06:45:21,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:21,318 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:21,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1080418 len: 1080422 to MEMORY
-2017-02-18 06:45:21,339 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:21,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
-2017-02-18 06:45:21,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3036363 len: 3036367 to MEMORY
-2017-02-18 06:45:21,442 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3036363 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:21,453 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3036363, inMemoryMapOutputs.size() -> 2, commitMemory -> 1080418, usedMemory ->4116781
-2017-02-18 06:45:21,455 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 855513 len: 855517 to MEMORY
-2017-02-18 06:45:21,472 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:45:21,482 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 855513 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:21,488 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 855513, inMemoryMapOutputs.size() -> 3, commitMemory -> 4116781, usedMemory ->4972294
-2017-02-18 06:45:21,491 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:21,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:21,493 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:21,494 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:21,494 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4972265 bytes
-2017-02-18 06:45:22,480 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 06:45:23,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4972294 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:23,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4972294 bytes from disk
-2017-02-18 06:45:23,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:23,369 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:23,370 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4972282 bytes
-2017-02-18 06:45:23,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:25,125 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 06:45:25,151 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:25,152 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000006_0 is allowed to commit now
-2017-02-18 06:45:25,154 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000006
-2017-02-18 06:45:25,159 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:25,162 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000006_0' done.
-2017-02-18 06:45:25,164 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000006_0
-2017-02-18 06:45:25,165 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000007_0
-2017-02-18 06:45:25,174 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:25,174 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:25,179 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@fb8896c
-2017-02-18 06:45:25,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:25,207 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:25,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 821532 len: 821536 to MEMORY
-2017-02-18 06:45:25,244 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:25,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
-2017-02-18 06:45:25,254 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2552612 len: 2552616 to MEMORY
-2017-02-18 06:45:25,323 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2552612 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:25,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2552612, inMemoryMapOutputs.size() -> 2, commitMemory -> 821532, usedMemory ->3374144
-2017-02-18 06:45:25,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 685165 len: 685169 to MEMORY
-2017-02-18 06:45:25,349 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 685165 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:25,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 685165, inMemoryMapOutputs.size() -> 3, commitMemory -> 3374144, usedMemory ->4059309
-2017-02-18 06:45:25,367 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:25,369 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:25,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:25,372 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:25,372 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4059289 bytes
-2017-02-18 06:45:25,490 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
-2017-02-18 06:45:26,915 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4059309 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4059309 bytes from disk
-2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:26,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4059300 bytes
-2017-02-18 06:45:26,918 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:28,258 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 06:45:28,294 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:28,301 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000007_0 is allowed to commit now
-2017-02-18 06:45:28,303 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000007
-2017-02-18 06:45:28,313 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:28,316 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000007_0' done.
-2017-02-18 06:45:28,316 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000007_0
-2017-02-18 06:45:28,317 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000008_0
-2017-02-18 06:45:28,323 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:28,324 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:28,324 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10bd314a
-2017-02-18 06:45:28,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:28,363 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:28,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1360514 len: 1360518 to MEMORY
-2017-02-18 06:45:28,398 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:28,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
-2017-02-18 06:45:28,426 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 5197932 len: 5197936 to MEMORY
-2017-02-18 06:45:28,495 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:45:28,533 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5197932 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:28,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5197932, inMemoryMapOutputs.size() -> 2, commitMemory -> 1360514, usedMemory ->6558446
-2017-02-18 06:45:28,547 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 1500234 len: 1500238 to MEMORY
-2017-02-18 06:45:28,587 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1500234 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:28,600 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1500234, inMemoryMapOutputs.size() -> 3, commitMemory -> 6558446, usedMemory ->8058680
-2017-02-18 06:45:28,602 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:28,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:28,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:28,604 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:28,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8058660 bytes
-2017-02-18 06:45:29,500 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 06:45:31,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8058680 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8058680 bytes from disk
-2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:31,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8058671 bytes
-2017-02-18 06:45:31,687 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:34,340 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:34,510 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 06:45:34,682 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 06:45:34,703 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:34,712 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000008_0 is allowed to commit now
-2017-02-18 06:45:34,713 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000008
-2017-02-18 06:45:34,714 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:34,716 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000008_0' done.
-2017-02-18 06:45:34,722 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000008_0
-2017-02-18 06:45:34,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000009_0
-2017-02-18 06:45:34,734 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:45:34,735 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:45:34,743 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@63c39669
-2017-02-18 06:45:34,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:45:34,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:45:34,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 835740 len: 835744 to MEMORY
-2017-02-18 06:45:34,817 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local767302091_0001_m_000001_0
-2017-02-18 06:45:34,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
-2017-02-18 06:45:34,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2114762 len: 2114766 to MEMORY
-2017-02-18 06:45:34,908 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2114762 bytes from map-output for attempt_local767302091_0001_m_000000_0
-2017-02-18 06:45:34,908 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2114762, inMemoryMapOutputs.size() -> 2, commitMemory -> 835740, usedMemory ->2950502
-2017-02-18 06:45:34,924 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 596065 len: 596069 to MEMORY
-2017-02-18 06:45:34,938 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 596065 bytes from map-output for attempt_local767302091_0001_m_000002_0
-2017-02-18 06:45:34,943 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 596065, inMemoryMapOutputs.size() -> 3, commitMemory -> 2950502, usedMemory ->3546567
-2017-02-18 06:45:34,947 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:45:34,949 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:34,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:45:34,950 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:45:34,950 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3546539 bytes
-2017-02-18 06:45:36,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3546567 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3546567 bytes from disk
-2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3546555 bytes
-2017-02-18 06:45:36,161 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:37,296 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 06:45:37,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:45:37,340 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000009_0 is allowed to commit now
-2017-02-18 06:45:37,342 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000009
-2017-02-18 06:45:37,348 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:45:37,351 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000009_0' done.
-2017-02-18 06:45:37,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000009_0
-2017-02-18 06:45:37,354 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 06:45:37,515 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:45:37,562 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local767302091_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-    at java.security.AccessController.doPrivileged(Native Method)
-    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-    ... 1 more
-2017-02-18 06:45:38,520 INFO org.apache.hadoop.mapreduce.Job: Job job_local767302091_0001 failed with state FAILED due to: NA
-2017-02-18 06:45:38,725 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-    File System Counters
-        FILE: Number of bytes read=921244792
-        FILE: Number of bytes written=961214284
-        FILE: Number of read operations=0
-        FILE: Number of large read operations=0
-        FILE: Number of write operations=0
-    Map-Reduce Framework
-        Map input records=507535
-        Map output records=4678719
-        Map output bytes=43638689
-        Map output materialized bytes=52996307
-        Input split bytes=351
-        Combine input records=0
-        Combine output records=0
-        Reduce input groups=217527
-        Reduce shuffle bytes=52996307
-        Reduce input records=4678719
-        Reduce output records=124
-        Spilled Records=9357438
-        Shuffled Maps =30
-        Failed Shuffles=0
-        Merged Map outputs=30
-        GC time elapsed (ms)=912
-        Total committed heap usage (bytes)=2551959552
-    Shuffle Errors
-        BAD_ID=0
-        CONNECTION=0
-        IO_ERROR=0
-        WRONG_LENGTH=0
-        WRONG_MAP=0
-        WRONG_REDUCE=0
-    File Input Format Counters
-        Bytes Read=26057874
-    File Output Format Counters
-        Bytes Written=896
-2017-02-18 06:53:36,039 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 06:53:38,565 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 06:53:38,610 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 06:53:40,517 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 06:53:40,600 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 06:53:41,038 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 06:53:42,458 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1538746324_0001
-2017-02-18 06:53:44,381 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 06:53:44,382 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1538746324_0001
-2017-02-18 06:53:44,411 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 06:53:44,463 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:53:44,480 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 06:53:44,911 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 06:53:44,913 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:53:45,156 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:53:45,269 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:53:45,281 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 06:53:45,415 INFO org.apache.hadoop.mapreduce.Job: Job job_local1538746324_0001 running in uber mode : false
-2017-02-18 06:53:45,418 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 06:53:45,986 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:53:46,017 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:53:46,018 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:53:46,019 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:53:46,019 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:53:46,045 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:53:46,086 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:53:51,226 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:53:51,439 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
-2017-02-18 06:53:54,228 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:53:54,444 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
-2017-02-18 06:53:57,233 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:53:57,455 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
-2017-02-18 06:54:00,236 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:54:00,467 INFO org.apache.hadoop.mapreduce.Job: map 20% reduce 0%
-2017-02-18 06:54:01,451 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 06:54:03,238 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:03,487 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 06:54:06,239 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:09,241 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:12,248 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:15,252 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:18,259 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:21,260 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:24,265 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:27,266 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:38,774 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:54:38,815 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 06:54:38,836 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:54:38,849 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000000_0' done.
-2017-02-18 06:54:38,849 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:54:38,850 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:54:38,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:54:38,860 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:54:38,880 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 06:54:39,218 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:54:39,220 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:54:39,228 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:54:39,233 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:54:39,608 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:54:43,504 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 06:54:43,523 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 06:54:43,638 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 06:54:44,896 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:45,644 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 06:54:47,899 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:50,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:54:53,856 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:54:53,866 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 06:54:53,881 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:54:53,886 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000001_0' done.
-2017-02-18 06:54:53,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:54:53,888 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:54:53,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:54:53,897 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:54:53,906 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 06:54:54,305 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 06:54:54,327 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 06:54:54,328 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 06:54:54,329 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 06:54:54,329 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 06:54:54,339 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 06:54:54,342 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 06:54:54,668 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:54:58,024 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 06:54:58,036 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 06:54:58,680 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 06:54:59,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:55:00,688 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 06:55:02,919 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:55:05,920 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 06:55:06,114 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 06:55:06,133 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 06:55:06,139 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 06:55:06,145 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000002_0' done.
-2017-02-18 06:55:06,148 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:06,150 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 06:55:06,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 06:55:06,277 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000000_0
-2017-02-18 06:55:06,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:06,361 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:06,394 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41350279
-2017-02-18 06:55:06,569 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:06,596 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:06,720 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 06:55:06,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 881571 len: 881575 to MEMORY
-2017-02-18 06:55:07,005 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 881571 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:07,025 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 881571, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->881571
-2017-02-18 06:55:07,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 991747 len: 991751 to MEMORY
-2017-02-18 06:55:07,062 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:07,084 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 2, commitMemory -> 881571, usedMemory ->1873318
-2017-02-18 06:55:07,097 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2685875 len: 2685879 to MEMORY
-2017-02-18 06:55:07,203 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2685875 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:07,214 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2685875, inMemoryMapOutputs.size() -> 3, commitMemory -> 1873318, usedMemory ->4559193
-2017-02-18 06:55:07,218 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:07,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:07,221 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:07,284 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:07,285 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4559151 bytes
-2017-02-18 06:55:09,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4559193 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:09,503 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4559193 bytes from disk
-2017-02-18 06:55:09,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:09,517 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:09,518 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4559175 bytes
-2017-02-18 06:55:09,526 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:09,675 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 06:55:12,380 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:12,495 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 06:55:12,524 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:12,525 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000000_0 is allowed to commit now
-2017-02-18 06:55:12,531 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000000
-2017-02-18 06:55:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:12,543 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000000_0' done.
-2017-02-18 06:55:12,544 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000000_0
-2017-02-18 06:55:12,544 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000001_0
-2017-02-18 06:55:12,575 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:12,576 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:12,576 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5ba21282
-2017-02-18 06:55:12,585 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:12,599 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:12,609 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 992056 len: 992060 to MEMORY
-2017-02-18 06:55:12,636 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 992056 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:12,669 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 992056, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->992056
-2017-02-18 06:55:12,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1980898 len: 1980902 to MEMORY
-2017-02-18 06:55:12,726 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:12,736 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:55:12,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 2, commitMemory -> 992056, usedMemory ->2972954
-2017-02-18 06:55:12,754 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3630499 len: 3630503 to MEMORY
-2017-02-18 06:55:12,907 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3630499 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:12,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3630499, inMemoryMapOutputs.size() -> 3, commitMemory -> 2972954, usedMemory ->6603453
-2017-02-18 06:55:12,911 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:12,913 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:12,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:12,914 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:12,915 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6603444 bytes
-2017-02-18 06:55:13,751 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 06:55:15,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6603453 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6603453 bytes from disk
-2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6603446 bytes
-2017-02-18 06:55:15,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:18,589 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:18,762 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 06:55:18,944 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 06:55:18,946 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:18,957 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000001_0 is allowed to commit now
-2017-02-18 06:55:18,959 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000001
-2017-02-18 06:55:18,967 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:18,969 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000001_0' done.
-2017-02-18 06:55:18,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000001_0
-2017-02-18 06:55:18,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000002_0
-2017-02-18 06:55:18,999 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:19,000 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:19,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2455dbb6
-2017-02-18 06:55:19,016 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:19,037 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:19,050 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 1178681 len: 1178685 to MEMORY
-2017-02-18 06:55:19,075 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1178681 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:19,109 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1178681, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1178681
-2017-02-18 06:55:19,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1199435 len: 1199439 to MEMORY
-2017-02-18 06:55:19,156 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:19,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 2, commitMemory -> 1178681, usedMemory ->2378116
-2017-02-18 06:55:19,186 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 4721951 len: 4721955 to MEMORY
-2017-02-18 06:55:19,372 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4721951 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:19,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4721951, inMemoryMapOutputs.size() -> 3, commitMemory -> 2378116, usedMemory ->7100067
-2017-02-18 06:55:19,393 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:19,394 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:19,394 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:19,398 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:19,398 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7100041 bytes
-2017-02-18 06:55:22,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7100067 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:22,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7100067 bytes from disk
-2017-02-18 06:55:22,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:22,388 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:22,389 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7100054 bytes
-2017-02-18 06:55:22,389 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:25,015 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:25,784 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 29%
-2017-02-18 06:55:26,062 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 06:55:26,095 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:26,101 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000002_0 is allowed to commit now
-2017-02-18 06:55:26,103 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000002
-2017-02-18 06:55:26,111 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:26,113 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000002_0' done.
-2017-02-18 06:55:26,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000002_0
-2017-02-18 06:55:26,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000003_0
-2017-02-18 06:55:26,127 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:26,128 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:26,128 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5602c43c
-2017-02-18 06:55:26,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:26,159 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:26,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 750229 len: 750233 to MEMORY
-2017-02-18 06:55:26,184 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 750229 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:26,208 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 750229, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->750229
-2017-02-18 06:55:26,231 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 914896 len: 914900 to MEMORY
-2017-02-18 06:55:26,235 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:26,258 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 2, commitMemory -> 750229, usedMemory ->1665125
-2017-02-18 06:55:26,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2586992 len: 2586996 to MEMORY
-2017-02-18 06:55:26,346 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2586992 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:26,376 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2586992, inMemoryMapOutputs.size() -> 3, commitMemory -> 1665125, usedMemory ->4252117
-2017-02-18 06:55:26,377 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:26,378 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:26,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:26,379 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:26,380 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4252081 bytes
-2017-02-18 06:55:26,788 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 06:55:27,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4252117 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4252117 bytes from disk
-2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:27,991 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4252100 bytes
-2017-02-18 06:55:27,991 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:29,337 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 06:55:29,345 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:29,345 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000003_0 is allowed to commit now
-2017-02-18 06:55:29,346 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000003
-2017-02-18 06:55:29,359 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:29,364 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000003_0' done.
-2017-02-18 06:55:29,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000003_0
-2017-02-18 06:55:29,366 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000004_0
-2017-02-18 06:55:29,378 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:29,379 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:29,380 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ee007c9
-2017-02-18 06:55:29,394 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:29,415 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:29,427 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 962343 len: 962347 to MEMORY
-2017-02-18 06:55:29,458 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 962343 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:29,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 962343, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->962343
-2017-02-18 06:55:29,471 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1165904 len: 1165908 to MEMORY
-2017-02-18 06:55:29,513 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:29,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 2, commitMemory -> 962343, usedMemory ->2128247
-2017-02-18 06:55:29,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3465672 len: 3465676 to MEMORY
-2017-02-18 06:55:29,667 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3465672 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:29,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3465672, inMemoryMapOutputs.size() -> 3, commitMemory -> 2128247, usedMemory ->5593919
-2017-02-18 06:55:29,680 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:29,681 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:29,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:29,685 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:29,685 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5593898 bytes
-2017-02-18 06:55:29,797 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 06:55:31,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5593919 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5593919 bytes from disk
-2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5593911 bytes
-2017-02-18 06:55:31,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:33,710 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 06:55:33,726 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:33,728 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000004_0 is allowed to commit now
-2017-02-18 06:55:33,730 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000004
-2017-02-18 06:55:33,735 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:33,741 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000004_0' done.
-2017-02-18 06:55:33,742 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000004_0
-2017-02-18 06:55:33,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000005_0
-2017-02-18 06:55:33,750 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:33,751 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:33,752 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51080bce
-2017-02-18 06:55:33,765 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:33,784 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:33,805 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 725836 len: 725840 to MEMORY
-2017-02-18 06:55:33,809 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:55:33,815 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 725836 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:33,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 725836, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->725836
-2017-02-18 06:55:33,831 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 867472 len: 867476 to MEMORY
-2017-02-18 06:55:33,857 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:33,861 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 2, commitMemory -> 725836, usedMemory ->1593308
-2017-02-18 06:55:33,880 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2657280 len: 2657284 to MEMORY
-2017-02-18 06:55:33,973 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2657280 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:34,001 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2657280, inMemoryMapOutputs.size() -> 3, commitMemory -> 1593308, usedMemory ->4250588
-2017-02-18 06:55:34,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:34,004 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:34,004 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:34,006 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:34,006 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4250568 bytes
-2017-02-18 06:55:34,812 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 06:55:35,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4250588 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:35,361 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4250588 bytes from disk
-2017-02-18 06:55:35,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:35,366 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:35,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4250579 bytes
-2017-02-18 06:55:35,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:36,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 06:55:36,771 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:36,771 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000005_0 is allowed to commit now
-2017-02-18 06:55:36,783 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000005
-2017-02-18 06:55:36,787 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:36,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000005_0' done.
-2017-02-18 06:55:36,794 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000005_0
-2017-02-18 06:55:36,794 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000006_0
-2017-02-18 06:55:36,803 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:36,804 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:36,808 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17d602ac
-2017-02-18 06:55:36,815 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:55:36,831 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:36,847 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:36,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 855513 len: 855517 to MEMORY
-2017-02-18 06:55:36,868 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 855513 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:36,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 855513, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->855513
-2017-02-18 06:55:36,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1080418 len: 1080422 to MEMORY
-2017-02-18 06:55:36,939 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:36,943 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 2, commitMemory -> 855513, usedMemory ->1935931
-2017-02-18 06:55:36,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3036363 len: 3036367 to MEMORY
-2017-02-18 06:55:37,056 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3036363 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:37,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3036363, inMemoryMapOutputs.size() -> 3, commitMemory -> 1935931, usedMemory ->4972294
-2017-02-18 06:55:37,073 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:37,074 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:37,075 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:37,077 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:37,078 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4972265 bytes
-2017-02-18 06:55:37,818 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 06:55:38,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4972294 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:38,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4972294 bytes from disk
-2017-02-18 06:55:38,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:38,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:38,755 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4972282 bytes
-2017-02-18 06:55:38,757 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:40,406 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 06:55:40,413 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:40,413 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000006_0 is allowed to commit now
-2017-02-18 06:55:40,414 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000006
-2017-02-18 06:55:40,427 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:40,430 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000006_0' done.
-2017-02-18 06:55:40,431 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000006_0
-2017-02-18 06:55:40,432 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000007_0
-2017-02-18 06:55:40,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:40,441 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:40,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4258f598
-2017-02-18 06:55:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 685165 len: 685169 to MEMORY
-2017-02-18 06:55:40,501 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 685165 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:40,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 685165, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->685165
-2017-02-18 06:55:40,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 821532 len: 821536 to MEMORY
-2017-02-18 06:55:40,566 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:40,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 2, commitMemory -> 685165, usedMemory ->1506697
-2017-02-18 06:55:40,592 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2552612 len: 2552616 to MEMORY
-2017-02-18 06:55:40,694 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2552612 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:40,721 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2552612, inMemoryMapOutputs.size() -> 3, commitMemory -> 1506697, usedMemory ->4059309
-2017-02-18 06:55:40,724 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:40,725 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:40,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:40,727 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:40,727 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4059289 bytes
-2017-02-18 06:55:40,824 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
-2017-02-18 06:55:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4059309 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4059309 bytes from disk
-2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:42,174 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4059300 bytes
-2017-02-18 06:55:42,174 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:43,477 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 06:55:43,510 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:43,514 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000007_0 is allowed to commit now
-2017-02-18 06:55:43,515 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000007
-2017-02-18 06:55:43,522 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:43,526 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000007_0' done.
-2017-02-18 06:55:43,528 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000007_0
-2017-02-18 06:55:43,531 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000008_0
-2017-02-18 06:55:43,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:43,542 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:43,549 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a3779e
-2017-02-18 06:55:43,562 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:43,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:43,602 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 1500234 len: 1500238 to MEMORY
-2017-02-18 06:55:43,638 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1500234 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:43,663 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1500234, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1500234
-2017-02-18 06:55:43,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1360514 len: 1360518 to MEMORY
-2017-02-18 06:55:43,709 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:43,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 2, commitMemory -> 1500234, usedMemory ->2860748
-2017-02-18 06:55:43,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 5197932 len: 5197936 to MEMORY
-2017-02-18 06:55:43,829 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:55:43,987 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5197932 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:43,995 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5197932, inMemoryMapOutputs.size() -> 3, commitMemory -> 2860748, usedMemory ->8058680
-2017-02-18 06:55:43,997 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:43,998 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:43,998 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:44,001 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:44,001 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8058660 bytes
-2017-02-18 06:55:44,838 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 06:55:46,940 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8058680 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8058680 bytes from disk
-2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:46,943 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8058671 bytes
-2017-02-18 06:55:46,943 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:49,562 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:49,834 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 06:55:49,851 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:49,852 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 06:55:49,854 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000008_0 is allowed to commit now
-2017-02-18 06:55:49,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000008
-2017-02-18 06:55:49,872 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:49,875 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000008_0' done.
-2017-02-18 06:55:49,876 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000008_0
-2017-02-18 06:55:49,876 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000009_0
-2017-02-18 06:55:49,885 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 06:55:49,885 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 06:55:49,886 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@552a2a7d
-2017-02-18 06:55:49,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 06:55:49,910 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 06:55:49,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 596065 len: 596069 to MEMORY
-2017-02-18 06:55:49,964 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 596065 bytes from map-output for attempt_local1538746324_0001_m_000002_0
-2017-02-18 06:55:49,966 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 596065, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->596065
-2017-02-18 06:55:49,981 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 835740 len: 835744 to MEMORY
-2017-02-18 06:55:50,026 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local1538746324_0001_m_000001_0
-2017-02-18 06:55:50,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 2, commitMemory -> 596065, usedMemory ->1431805
-2017-02-18 06:55:50,046 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2114762 len: 2114766 to MEMORY
-2017-02-18 06:55:50,167 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2114762 bytes from map-output for attempt_local1538746324_0001_m_000000_0
-2017-02-18 06:55:50,184 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2114762, inMemoryMapOutputs.size() -> 3, commitMemory -> 1431805, usedMemory ->3546567
-2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 06:55:50,187 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 06:55:50,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3546539 bytes
-2017-02-18 06:55:51,273 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3546567 bytes to disk to satisfy reduce memory limit
-2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3546567 bytes from disk
-2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 06:55:51,276 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3546555 bytes
-2017-02-18 06:55:51,276 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:52,418 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 06:55:52,426 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 06:55:52,426 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000009_0 is allowed to commit now
-2017-02-18 06:55:52,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000009
-2017-02-18 06:55:52,441 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 06:55:52,443 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000009_0' done.
-2017-02-18 06:55:52,443 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000009_0
-2017-02-18 06:55:52,444 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 06:55:52,659 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1538746324_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-	at java.security.AccessController.doPrivileged(Native Method)
-	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-	... 1 more
-2017-02-18 06:55:52,861 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 06:55:52,861 INFO org.apache.hadoop.mapreduce.Job: Job job_local1538746324_0001 failed with state FAILED due to: NA
-2017-02-18 06:55:53,172 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-	File System Counters
-		FILE: Number of bytes read=921244792
-		FILE: Number of bytes written=961235798
-		FILE: Number of read operations=0
-		FILE: Number of large read operations=0
-		FILE: Number of write operations=0
-	Map-Reduce Framework
-		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=52996307
-		Input split bytes=351
-		Combine input records=0
-		Combine output records=0
-		Reduce input groups=217527
-		Reduce shuffle bytes=52996307
-		Reduce input records=4678719
-		Reduce output records=124
-		Spilled Records=9357438
-		Shuffled Maps =30
-		Failed Shuffles=0
-		Merged Map outputs=30
-		GC time elapsed (ms)=817
-		Total committed heap usage (bytes)=2551959552
-	Shuffle Errors
-		BAD_ID=0
-		CONNECTION=0
-		IO_ERROR=0
-		WRONG_LENGTH=0
-		WRONG_MAP=0
-		WRONG_REDUCE=0
-	File Input Format Counters
-		Bytes Read=26057874
-	File Output Format Counters
-		Bytes Written=1326
-2017-02-18 07:01:48,434 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 07:01:50,939 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 07:01:50,976 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 07:01:53,187 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 07:01:53,292 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 07:01:53,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 07:01:55,706 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1461620831_0001
-2017-02-18 07:01:57,496 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 07:01:57,502 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1461620831_0001
-2017-02-18 07:01:57,521 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 07:01:57,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:01:57,610 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 07:01:58,116 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 07:01:58,117 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:01:58,404 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:01:58,524 INFO org.apache.hadoop.mapreduce.Job: Job job_local1461620831_0001 running in uber mode : false
-2017-02-18 07:01:58,528 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 07:01:58,537 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:01:58,566 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 07:01:59,639 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:01:59,671 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:01:59,717 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:02:04,512 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:02:04,545 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
-2017-02-18 07:02:07,525 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:02:07,555 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
-2017-02-18 07:02:10,531 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:02:10,565 INFO org.apache.hadoop.mapreduce.Job: map 12% reduce 0%
-2017-02-18 07:02:13,544 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:02:13,625 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
-2017-02-18 07:02:15,477 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 07:02:15,480 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 07:02:16,545 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:16,633 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 07:02:19,549 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:22,555 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:25,558 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:28,561 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:31,563 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:34,564 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:37,566 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:40,570 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:50,701 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:02:50,770 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 07:02:50,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:02:50,808 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000000_0' done.
-2017-02-18 07:02:50,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:02:50,811 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:02:50,818 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:02:50,819 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:02:50,829 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 07:02:51,218 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:02:51,225 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:02:51,225 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:02:51,226 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:02:51,226 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:02:51,235 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:02:51,256 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:02:51,743 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:02:55,446 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 07:02:55,769 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 07:02:56,862 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:02:57,773 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 07:02:59,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:03:02,880 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:03:05,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:03:06,047 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:03:06,067 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 07:03:06,078 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:03:06,086 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000001_0' done.
-2017-02-18 07:03:06,086 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:06,087 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:06,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:06,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:06,110 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 07:03:06,563 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:03:06,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:03:06,568 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:03:06,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:03:06,577 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:03:06,582 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:03:06,592 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:03:06,803 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:03:09,718 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 07:03:09,808 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 07:03:12,116 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:03:12,816 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 07:03:15,123 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:03:17,874 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:03:17,897 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 07:03:17,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:03:17,914 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000002_0' done.
-2017-02-18 07:03:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:17,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 07:03:18,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 07:03:18,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000000_0
-2017-02-18 07:03:18,087 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:18,094 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:18,147 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@956ff4d
-2017-02-18 07:03:18,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:18,338 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:18,686 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 878989 len: 878993 to MEMORY
-2017-02-18 07:03:18,769 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:18,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989
-2017-02-18 07:03:18,832 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:03:18,836 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 864040 len: 864044 to MEMORY
-2017-02-18 07:03:18,839 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 864040 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:18,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 864040, inMemoryMapOutputs.size() -> 2, commitMemory -> 878989, usedMemory ->1743029
-2017-02-18 07:03:18,864 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2462807 len: 2462811 to MEMORY
-2017-02-18 07:03:18,986 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2462807 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:18,987 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2462807, inMemoryMapOutputs.size() -> 3, commitMemory -> 1743029, usedMemory ->4205836
-2017-02-18 07:03:18,988 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:18,989 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:18,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:19,037 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:19,057 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4205810 bytes
-2017-02-18 07:03:21,717 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4205836 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:21,718 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4205836 bytes from disk
-2017-02-18 07:03:21,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:21,737 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:21,737 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4205821 bytes
-2017-02-18 07:03:21,738 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:21,880 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 07:03:24,107 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:24,620 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 07:03:24,634 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:24,638 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000000_0 is allowed to commit now
-2017-02-18 07:03:24,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000000
-2017-02-18 07:03:24,647 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:24,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000000_0' done.
-2017-02-18 07:03:24,655 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000000_0
-2017-02-18 07:03:24,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000001_0
-2017-02-18 07:03:24,669 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:24,670 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:24,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22b1221b
-2017-02-18 07:03:24,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:24,697 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:24,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1021001 len: 1021005 to MEMORY
-2017-02-18 07:03:24,731 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:24,759 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001
-2017-02-18 07:03:24,781 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 2021094 len: 2021098 to MEMORY
-2017-02-18 07:03:24,835 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2021094 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:24,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2021094, inMemoryMapOutputs.size() -> 2, commitMemory -> 1021001, usedMemory ->3042095
-2017-02-18 07:03:24,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3748098 len: 3748102 to MEMORY
-2017-02-18 07:03:24,858 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:03:25,029 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3748098 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:25,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3748098, inMemoryMapOutputs.size() -> 3, commitMemory -> 3042095, usedMemory ->6790193
-2017-02-18 07:03:25,056 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:25,057 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:25,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:25,058 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:25,059 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6790184 bytes
-2017-02-18 07:03:25,868 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 07:03:27,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6790193 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6790193 bytes from disk
-2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:27,769 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6790186 bytes
-2017-02-18 07:03:27,772 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:30,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 07:03:30,557 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:30,557 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000001_0 is allowed to commit now
-2017-02-18 07:03:30,558 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000001
-2017-02-18 07:03:30,568 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:30,574 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000001_0' done.
-2017-02-18 07:03:30,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000001_0
-2017-02-18 07:03:30,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000002_0
-2017-02-18 07:03:30,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:30,589 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:30,590 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@16655c61
-2017-02-18 07:03:30,599 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:30,619 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:30,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1216330 len: 1216334 to MEMORY
-2017-02-18 07:03:30,658 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:30,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330
-2017-02-18 07:03:30,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1268271 len: 1268275 to MEMORY
-2017-02-18 07:03:30,698 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1268271 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:30,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1268271, inMemoryMapOutputs.size() -> 2, commitMemory -> 1216330, usedMemory ->2484601
-2017-02-18 07:03:30,734 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 4957197 len: 4957201 to MEMORY
-2017-02-18 07:03:30,879 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:03:30,959 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4957197 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:30,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4957197, inMemoryMapOutputs.size() -> 3, commitMemory -> 2484601, usedMemory ->7441798
-2017-02-18 07:03:30,973 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:30,974 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:30,974 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:30,977 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:30,977 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7441769 bytes
-2017-02-18 07:03:31,882 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 07:03:33,615 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7441798 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:33,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7441798 bytes from disk
-2017-02-18 07:03:33,623 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:33,623 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:33,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7441780 bytes
-2017-02-18 07:03:33,632 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:36,306 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 07:03:36,325 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:36,333 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000002_0 is allowed to commit now
-2017-02-18 07:03:36,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000002
-2017-02-18 07:03:36,341 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:36,343 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000002_0' done.
-2017-02-18 07:03:36,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000002_0
-2017-02-18 07:03:36,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000003_0
-2017-02-18 07:03:36,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:36,364 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:36,364 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@63cf2179
-2017-02-18 07:03:36,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:36,396 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:36,410 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 686549 len: 686553 to MEMORY
-2017-02-18 07:03:36,418 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:36,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549
-2017-02-18 07:03:36,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 947339 len: 947343 to MEMORY
-2017-02-18 07:03:36,490 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 947339 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:36,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 947339, inMemoryMapOutputs.size() -> 2, commitMemory -> 686549, usedMemory ->1633888
-2017-02-18 07:03:36,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2553004 len: 2553008 to MEMORY
-2017-02-18 07:03:36,707 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2553004 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:36,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2553004, inMemoryMapOutputs.size() -> 3, commitMemory -> 1633888, usedMemory ->4186892
-2017-02-18 07:03:36,721 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:36,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:36,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:36,728 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:36,728 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4186872 bytes
-2017-02-18 07:03:36,892 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 07:03:38,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4186892 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:38,215 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4186892 bytes from disk
-2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4186881 bytes
-2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:39,608 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 07:03:39,617 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:39,617 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000003_0 is allowed to commit now
-2017-02-18 07:03:39,632 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000003
-2017-02-18 07:03:39,636 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:39,638 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000003_0' done.
-2017-02-18 07:03:39,639 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000003_0
-2017-02-18 07:03:39,639 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000004_0
-2017-02-18 07:03:39,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:39,652 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:39,652 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2296ff51
-2017-02-18 07:03:39,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:39,690 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:39,704 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 853557 len: 853561 to MEMORY
-2017-02-18 07:03:39,714 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:39,740 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557
-2017-02-18 07:03:39,755 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1002557 len: 1002561 to MEMORY
-2017-02-18 07:03:39,777 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1002557 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:39,780 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1002557, inMemoryMapOutputs.size() -> 2, commitMemory -> 853557, usedMemory ->1856114
-2017-02-18 07:03:39,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3061786 len: 3061790 to MEMORY
-2017-02-18 07:03:39,900 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:03:39,941 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3061786 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:39,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3061786, inMemoryMapOutputs.size() -> 3, commitMemory -> 1856114, usedMemory ->4917900
-2017-02-18 07:03:39,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:39,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:39,963 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:39,963 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4917879 bytes
-2017-02-18 07:03:40,901 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 07:03:41,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4917900 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:41,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4917900 bytes from disk
-2017-02-18 07:03:41,757 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:41,758 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:41,760 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4917892 bytes
-2017-02-18 07:03:41,762 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:43,477 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 07:03:43,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:43,485 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000004_0 is allowed to commit now
-2017-02-18 07:03:43,486 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000004
-2017-02-18 07:03:43,496 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:43,503 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000004_0' done.
-2017-02-18 07:03:43,504 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000004_0
-2017-02-18 07:03:43,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000005_0
-2017-02-18 07:03:43,510 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:03:43,512 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:03:43,521 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@347e12a
-2017-02-18 07:03:43,529 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:03:43,550 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:03:43,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 767280 len: 767284 to MEMORY
-2017-02-18 07:03:43,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local1461620831_0001_m_000002_0
-2017-02-18 07:03:43,588 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280
-2017-02-18 07:03:43,600 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 894060 len: 894064 to MEMORY
-2017-02-18 07:03:43,638 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 894060 bytes from map-output for attempt_local1461620831_0001_m_000001_0
-2017-02-18 07:03:43,653 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 894060, inMemoryMapOutputs.size() -> 2, commitMemory -> 767280, usedMemory ->1661340
-2017-02-18 07:03:43,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2671328 len: 2671332 to MEMORY
-2017-02-18 07:03:43,774 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2671328 bytes from map-output for attempt_local1461620831_0001_m_000000_0
-2017-02-18 07:03:43,783 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2671328, inMemoryMapOutputs.size() -> 3, commitMemory -> 1661340, usedMemory ->4332668
-2017-02-18 07:03:43,788 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:03:43,789 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:43,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:03:43,793 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:03:43,794 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4332653 bytes
-2017-02-18 07:03:43,911 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 07:03:45,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4332668 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4332668 bytes from disk
-2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4332659 bytes
-2017-02-18 07:03:45,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:46,909 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 07:03:46,936 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:03:46,944 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000005_0 is allowed to commit now
-2017-02-18 07:03:46,946 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000005
-2017-02-18 07:03:46,951 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:03:46,956 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000005_0' done.
-2017-02-18 07:03:46,957 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000005_0 -2017-02-18 07:03:46,958 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000006_0 -2017-02-18 07:03:46,973 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:03:46,974 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:03:46,974 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@befc7b6 -2017-02-18 07:03:46,980 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:03:46,995 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:03:47,012 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 967027 len: 967031 to MEMORY -2017-02-18 07:03:47,033 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local1461620831_0001_m_000002_0 -2017-02-18 07:03:47,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027 -2017-02-18 07:03:47,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1152263 len: 1152267 to MEMORY -2017-02-18 07:03:47,090 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1152263 bytes from map-output for attempt_local1461620831_0001_m_000001_0 -2017-02-18 07:03:47,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1152263, inMemoryMapOutputs.size() -> 2, commitMemory -> 967027, usedMemory ->2119290 -2017-02-18 07:03:47,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3388337 len: 3388341 to MEMORY -2017-02-18 07:03:47,270 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3388337 bytes from map-output for attempt_local1461620831_0001_m_000000_0 -2017-02-18 07:03:47,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3388337, inMemoryMapOutputs.size() -> 3, commitMemory -> 2119290, usedMemory ->5507627 -2017-02-18 07:03:47,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:03:47,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:03:47,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:03:47,280 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:03:47,280 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5507601 bytes -2017-02-18 07:03:47,922 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% -2017-02-18 07:03:49,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5507627 bytes to disk to satisfy reduce memory limit -2017-02-18 07:03:49,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5507627 bytes from disk -2017-02-18 07:03:49,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:03:49,305 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:03:49,307 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5507615 bytes -2017-02-18 07:03:49,311 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:03:51,240 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000006_0 is done. And is in the process of committing -2017-02-18 07:03:51,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:03:51,293 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000006_0 is allowed to commit now -2017-02-18 07:03:51,296 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000006 -2017-02-18 07:03:51,301 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:03:51,307 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000006_0' done. 
-2017-02-18 07:03:51,308 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000006_0 -2017-02-18 07:03:51,309 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000007_0 -2017-02-18 07:03:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:03:51,321 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:03:51,321 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df -2017-02-18 07:03:51,328 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:03:51,352 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:03:51,364 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 710901 len: 710905 to MEMORY -2017-02-18 07:03:51,373 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local1461620831_0001_m_000002_0 -2017-02-18 07:03:51,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901 -2017-02-18 07:03:51,396 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 824970 len: 824974 to MEMORY -2017-02-18 07:03:51,441 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 824970 bytes from map-output for attempt_local1461620831_0001_m_000001_0 -2017-02-18 07:03:51,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 824970, inMemoryMapOutputs.size() -> 2, commitMemory -> 710901, usedMemory ->1535871 -2017-02-18 07:03:51,473 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2633768 len: 2633772 to MEMORY -2017-02-18 07:03:51,589 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2633768 bytes from map-output for attempt_local1461620831_0001_m_000000_0 -2017-02-18 07:03:51,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2633768, inMemoryMapOutputs.size() -> 3, commitMemory -> 1535871, usedMemory ->4169639 -2017-02-18 07:03:51,611 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:03:51,612 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:03:51,612 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:03:51,613 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:03:51,614 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4169616 bytes -2017-02-18 07:03:51,941 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70% -2017-02-18 07:03:53,067 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4169639 bytes to disk to satisfy reduce memory limit -2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4169639 bytes from disk -2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:03:53,076 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4169630 bytes -2017-02-18 07:03:53,078 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:03:54,425 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000007_0 is done. And is in the process of committing -2017-02-18 07:03:54,436 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:03:54,436 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000007_0 is allowed to commit now -2017-02-18 07:03:54,437 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000007 -2017-02-18 07:03:54,458 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:03:54,458 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000007_0' done. 
-2017-02-18 07:03:54,462 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000007_0 -2017-02-18 07:03:54,462 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000008_0 -2017-02-18 07:03:54,483 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:03:54,484 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:03:54,484 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7af45a93 -2017-02-18 07:03:54,502 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:03:54,522 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:03:54,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1462439 len: 1462443 to MEMORY -2017-02-18 07:03:54,578 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local1461620831_0001_m_000002_0 -2017-02-18 07:03:54,597 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439 -2017-02-18 07:03:54,600 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1510887 len: 1510891 to MEMORY -2017-02-18 07:03:54,643 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1510887 bytes from map-output for attempt_local1461620831_0001_m_000001_0 -2017-02-18 07:03:54,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1510887, inMemoryMapOutputs.size() -> 2, commitMemory -> 1462439, usedMemory ->2973326 -2017-02-18 07:03:54,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 5237666 len: 5237670 to MEMORY -2017-02-18 07:03:54,885 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5237666 bytes from map-output for attempt_local1461620831_0001_m_000000_0 -2017-02-18 07:03:54,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5237666, inMemoryMapOutputs.size() -> 3, commitMemory -> 2973326, usedMemory ->8210992 -2017-02-18 07:03:54,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:03:54,900 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:03:54,900 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:03:54,912 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:03:54,912 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8210965 bytes -2017-02-18 07:03:54,950 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80% -2017-02-18 07:03:58,179 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8210992 bytes to disk to satisfy reduce memory limit -2017-02-18 07:03:58,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8210992 bytes from disk -2017-02-18 07:03:58,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:03:58,182 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:03:58,191 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8210983 bytes -2017-02-18 07:03:58,192 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:04:00,532 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:04:00,970 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 89% -2017-02-18 07:04:01,265 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000008_0 is done. And is in the process of committing -2017-02-18 07:04:01,282 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:04:01,284 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000008_0 is allowed to commit now -2017-02-18 07:04:01,285 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000008 -2017-02-18 07:04:01,292 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:04:01,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000008_0' done. 
-2017-02-18 07:04:01,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000008_0 -2017-02-18 07:04:01,296 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000009_0 -2017-02-18 07:04:01,311 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:04:01,312 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:04:01,312 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3cec8446 -2017-02-18 07:04:01,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:04:01,334 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:04:01,349 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 563620 len: 563624 to MEMORY -2017-02-18 07:04:01,373 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local1461620831_0001_m_000002_0 -2017-02-18 07:04:01,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620 -2017-02-18 07:04:01,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 733075 len: 733079 to MEMORY -2017-02-18 07:04:01,425 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 733075 bytes from map-output for attempt_local1461620831_0001_m_000001_0 -2017-02-18 07:04:01,430 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 733075, inMemoryMapOutputs.size() -> 2, commitMemory -> 563620, usedMemory ->1296695 -2017-02-18 07:04:01,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 1935947 len: 1935951 to MEMORY -2017-02-18 07:04:01,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1935947 bytes from map-output for attempt_local1461620831_0001_m_000000_0 -2017-02-18 07:04:01,535 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1935947, inMemoryMapOutputs.size() -> 3, commitMemory -> 1296695, usedMemory ->3232642 -2017-02-18 07:04:01,537 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:04:01,538 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:04:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:04:01,539 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:04:01,539 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3232620 bytes
-2017-02-18 07:04:01,973 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 07:04:02,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3232642 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:04:02,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3232642 bytes from disk
-2017-02-18 07:04:02,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:04:02,666 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:04:02,668 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3232631 bytes
-2017-02-18 07:04:02,670 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:04:03,658 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 07:04:03,693 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:04:03,696 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000009_0 is allowed to commit now
-2017-02-18 07:04:03,697 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000009
-2017-02-18 07:04:03,705 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:04:03,710 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000009_0' done.
-2017-02-18 07:04:03,711 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000009_0
-2017-02-18 07:04:03,712 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 07:04:03,939 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1461620831_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-	at java.security.AccessController.doPrivileged(Native Method)
-	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-	... 1 more
-2017-02-18 07:04:03,980 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:04:03,980 INFO org.apache.hadoop.mapreduce.Job: Job job_local1461620831_0001 failed with state FAILED due to: NA
-2017-02-18 07:04:04,376 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-	File System Counters
-		FILE: Number of bytes read=919728132
-		FILE: Number of bytes written=960477658
-		FILE: Number of read operations=0
-		FILE: Number of large read operations=0
-		FILE: Number of write operations=0
-	Map-Reduce Framework
-		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=52996307
-		Input split bytes=351
-		Combine input records=0
-		Combine output records=0
-		Reduce input groups=195336
-		Reduce shuffle bytes=52996307
-		Reduce input records=4678719
-		Reduce output records=119
-		Spilled Records=9357438
-		Shuffled Maps =30
-		Failed Shuffles=0
-		Merged Map outputs=30
-		GC time elapsed (ms)=926
-		Total committed heap usage (bytes)=2551959552
-	Shuffle Errors
-		BAD_ID=0
-		CONNECTION=0
-		IO_ERROR=0
-		WRONG_LENGTH=0
-		WRONG_MAP=0
-		WRONG_REDUCE=0
-	File Input Format Counters
-		Bytes Read=26057874
-	File Output Format Counters
-		Bytes Written=1297
-2017-02-18 07:05:58,046 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 07:06:00,716 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 07:06:00,740 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 07:06:02,868 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 07:06:02,922 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 -2017-02-18 07:06:03,338 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 -2017-02-18 07:06:04,825 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1414308832_0001 -2017-02-18 07:06:06,783 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 07:06:06,802 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1414308832_0001 -2017-02-18 07:06:06,814 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 07:06:06,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:06,976 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 07:06:07,402 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 07:06:07,403 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:07,695 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:07,806 INFO org.apache.hadoop.mapreduce.Job: Job job_local1414308832_0001 running in uber mode : false -2017-02-18 07:06:07,810 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 07:06:07,871 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:07,877 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 -2017-02-18 07:06:08,639 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 07:06:08,642 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 07:06:08,642 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 07:06:08,643 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 07:06:08,643 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 07:06:08,672 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 07:06:08,713 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 07:06:13,796 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 07:06:13,833 INFO org.apache.hadoop.mapreduce.Job: map 27% reduce 0% -2017-02-18 07:06:15,764 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 07:06:15,774 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 07:06:15,776 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 07:06:15,776 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 -2017-02-18 07:06:15,777 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 -2017-02-18 07:06:16,812 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 07:06:16,842 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 07:06:19,818 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 07:06:22,822 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 07:06:24,736 INFO org.apache.hadoop.mapred.MapTask: 
Finished spill 0 -2017-02-18 07:06:24,781 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_m_000000_0 is done. And is in the process of committing -2017-02-18 07:06:24,794 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 07:06:24,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_m_000000_0' done. -2017-02-18 07:06:24,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:24,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. -2017-02-18 07:06:24,876 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 07:06:24,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 07:06:24,936 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000000_0 -2017-02-18 07:06:25,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:25,002 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:25,006 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dbf1737 -2017-02-18 07:06:25,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:25,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:25,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 878989 len: 878993 to MEMORY -2017-02-18 07:06:25,466 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:25,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989 -2017-02-18 07:06:25,514 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:25,516 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:25,516 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:25,557 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:25,570 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 878982 bytes -2017-02-18 07:06:26,599 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 878989 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:26,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 878993 bytes from disk -2017-02-18 07:06:26,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:26,603 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:26,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 878982 bytes -2017-02-18 07:06:26,618 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:26,683 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords -2017-02-18 07:06:28,244 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 07:06:28,269 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:28,270 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000000_0 is allowed to commit now -2017-02-18 07:06:28,283 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000000 -2017-02-18 07:06:28,284 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:28,284 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000000_0' done. 
-2017-02-18 07:06:28,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000000_0 -2017-02-18 07:06:28,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000001_0 -2017-02-18 07:06:28,316 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:28,317 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:28,318 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@661ed391 -2017-02-18 07:06:28,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:28,340 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:28,347 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1021001 len: 1021005 to MEMORY -2017-02-18 07:06:28,360 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:28,388 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001 -2017-02-18 07:06:28,389 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:28,390 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:28,390 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:28,392 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:28,393 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1020998 bytes -2017-02-18 07:06:28,885 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% -2017-02-18 07:06:28,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1021001 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:28,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1021005 bytes from disk -2017-02-18 07:06:28,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:28,900 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:28,902 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1020998 bytes -2017-02-18 07:06:28,906 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:29,480 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000001_0 is done. And is in the process of committing -2017-02-18 07:06:29,495 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:29,504 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000001_0 is allowed to commit now -2017-02-18 07:06:29,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000001 -2017-02-18 07:06:29,510 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:29,521 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000001_0' done. -2017-02-18 07:06:29,522 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000001_0 -2017-02-18 07:06:29,522 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000002_0 -2017-02-18 07:06:29,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:29,534 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:29,537 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70f6431f -2017-02-18 07:06:29,557 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:29,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:29,598 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1216330 len: 1216334 to MEMORY -2017-02-18 07:06:29,642 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:29,643 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330 -2017-02-18 07:06:29,643 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:29,644 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:29,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:29,646 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:29,646 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1216320 bytes -2017-02-18 07:06:29,887 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20% -2017-02-18 07:06:30,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1216330 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:30,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1216334 bytes from disk -2017-02-18 07:06:30,176 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:30,177 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:30,177 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1216320 bytes -2017-02-18 07:06:30,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:30,696 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000002_0 is done. And is in the process of committing -2017-02-18 07:06:30,710 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:30,713 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000002_0 is allowed to commit now -2017-02-18 07:06:30,714 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000002 -2017-02-18 07:06:30,724 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:30,727 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000002_0' done. 
-2017-02-18 07:06:30,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000002_0 -2017-02-18 07:06:30,729 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000003_0 -2017-02-18 07:06:30,740 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:30,743 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:30,749 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51ff177c -2017-02-18 07:06:30,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:30,779 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:30,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 686549 len: 686553 to MEMORY -2017-02-18 07:06:30,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:30,817 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549 -2017-02-18 07:06:30,820 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:30,822 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:30,822 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:30,823 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:30,823 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 686541 bytes -2017-02-18 07:06:30,892 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30% -2017-02-18 07:06:31,114 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 686549 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:31,116 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 686553 bytes from disk -2017-02-18 07:06:31,120 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:31,121 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:31,122 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 686541 bytes -2017-02-18 07:06:31,124 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:31,419 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000003_0 is done. And is in the process of committing -2017-02-18 07:06:31,438 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:31,447 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000003_0 is allowed to commit now -2017-02-18 07:06:31,448 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000003 -2017-02-18 07:06:31,455 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:31,463 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000003_0' done. -2017-02-18 07:06:31,464 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000003_0 -2017-02-18 07:06:31,464 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000004_0 -2017-02-18 07:06:31,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:31,474 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:31,474 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f927f23 -2017-02-18 07:06:31,482 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:31,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:31,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 853557 len: 853561 to MEMORY -2017-02-18 07:06:31,534 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:31,543 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557 -2017-02-18 07:06:31,557 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:31,558 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:31,558 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:31,560 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:31,560 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 853550 bytes -2017-02-18 07:06:31,901 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40% -2017-02-18 07:06:31,947 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 853557 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:31,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 853561 bytes from disk -2017-02-18 07:06:31,954 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:31,955 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:31,965 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 853550 bytes -2017-02-18 07:06:31,965 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:32,370 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000004_0 is done. And is in the process of committing -2017-02-18 07:06:32,384 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:32,386 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000004_0 is allowed to commit now -2017-02-18 07:06:32,392 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000004 -2017-02-18 07:06:32,398 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:32,401 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000004_0' done. 
-2017-02-18 07:06:32,401 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000004_0 -2017-02-18 07:06:32,402 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000005_0 -2017-02-18 07:06:32,413 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:32,414 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:32,414 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6d6d43cd -2017-02-18 07:06:32,421 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:32,442 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:32,458 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 767280 len: 767284 to MEMORY -2017-02-18 07:06:32,468 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:32,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280 -2017-02-18 07:06:32,492 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:32,493 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:32,493 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:32,495 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:32,495 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 767274 bytes -2017-02-18 07:06:32,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 767280 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:32,802 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 767284 bytes from disk -2017-02-18 07:06:32,803 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:32,804 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:32,809 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 767274 bytes -2017-02-18 07:06:32,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:32,907 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50% -2017-02-18 07:06:33,099 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000005_0 is done. And is in the process of committing -2017-02-18 07:06:33,130 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:33,140 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000005_0 is allowed to commit now -2017-02-18 07:06:33,142 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000005 -2017-02-18 07:06:33,150 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:33,158 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000005_0' done. -2017-02-18 07:06:33,159 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000005_0 -2017-02-18 07:06:33,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000006_0 -2017-02-18 07:06:33,177 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:33,178 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:33,178 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a83e27 -2017-02-18 07:06:33,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:33,205 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:33,219 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 967027 len: 967031 to MEMORY -2017-02-18 07:06:33,239 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:33,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027 -2017-02-18 07:06:33,245 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:33,246 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:33,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:33,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:33,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 967018 bytes -2017-02-18 07:06:33,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 967027 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:33,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 967031 bytes from disk -2017-02-18 07:06:33,653 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:33,654 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:33,655 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 967018 bytes -2017-02-18 07:06:33,657 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:33,909 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% -2017-02-18 07:06:34,078 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000006_0 is done. And is in the process of committing -2017-02-18 07:06:34,090 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:34,093 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000006_0 is allowed to commit now -2017-02-18 07:06:34,098 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000006 -2017-02-18 07:06:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:06:34,105 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000006_0' done. 
-2017-02-18 07:06:34,106 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000006_0 -2017-02-18 07:06:34,107 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000007_0 -2017-02-18 07:06:34,121 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:06:34,122 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:06:34,122 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@528d9c2c -2017-02-18 07:06:34,126 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:06:34,142 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:06:34,159 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 710901 len: 710905 to MEMORY -2017-02-18 07:06:34,163 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local1414308832_0001_m_000000_0 -2017-02-18 07:06:34,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901 -2017-02-18 07:06:34,177 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:06:34,178 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:34,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:06:34,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:34,184 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 710893 bytes -2017-02-18 07:06:34,496 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 710901 bytes to disk to satisfy reduce memory limit -2017-02-18 07:06:34,502 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 710905 bytes from disk -2017-02-18 07:06:34,504 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:06:34,504 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:06:34,505 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 710893 bytes -2017-02-18 07:06:34,507 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. -2017-02-18 07:06:34,767 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000007_0 is done. And is in the process of committing -2017-02-18 07:06:34,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
-2017-02-18 07:06:34,797 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000007_0 is allowed to commit now
-2017-02-18 07:06:34,799 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000007
-2017-02-18 07:06:34,806 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:06:34,807 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000007_0' done.
-2017-02-18 07:06:34,808 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000007_0
-2017-02-18 07:06:34,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000008_0
-2017-02-18 07:06:34,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:06:34,820 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:06:34,825 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7934bf83
-2017-02-18 07:06:34,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:06:34,856 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:06:34,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1462439 len: 1462443 to MEMORY
-2017-02-18 07:06:34,911 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:06:34,929 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local1414308832_0001_m_000000_0
-2017-02-18 07:06:34,929 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439
-2017-02-18 07:06:34,930 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:06:34,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:34,931 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:06:34,933 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:06:34,933 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1462429 bytes
-2017-02-18 07:06:35,531 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1462439 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:06:35,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1462443 bytes from disk
-2017-02-18 07:06:35,535 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:06:35,536 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:06:35,537 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1462429 bytes
-2017-02-18 07:06:35,538 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:35,912 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 07:06:36,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 07:06:36,168 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:36,169 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000008_0 is allowed to commit now
-2017-02-18 07:06:36,169 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000008
-2017-02-18 07:06:36,186 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:06:36,189 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000008_0' done.
-2017-02-18 07:06:36,191 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000008_0
-2017-02-18 07:06:36,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000009_0
-2017-02-18 07:06:36,202 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:06:36,203 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:06:36,207 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4fa16b41
-2017-02-18 07:06:36,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:06:36,242 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:06:36,259 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 563620 len: 563624 to MEMORY
-2017-02-18 07:06:36,265 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local1414308832_0001_m_000000_0
-2017-02-18 07:06:36,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620
-2017-02-18 07:06:36,290 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:06:36,292 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:36,292 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:06:36,293 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:06:36,293 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 563614 bytes
-2017-02-18 07:06:36,505 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 563620 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:06:36,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 563624 bytes from disk
-2017-02-18 07:06:36,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:06:36,512 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:06:36,513 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 563614 bytes
-2017-02-18 07:06:36,515 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:36,738 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 07:06:36,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
-2017-02-18 07:06:36,757 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000009_0 is allowed to commit now
-2017-02-18 07:06:36,758 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000009
-2017-02-18 07:06:36,767 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:06:36,770 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000009_0' done.
-2017-02-18 07:06:36,773 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000009_0
-2017-02-18 07:06:36,774 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 07:06:36,916 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:06:36,937 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1414308832_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-	at java.security.AccessController.doPrivileged(Native Method)
-	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-	... 1 more
-2017-02-18 07:06:37,921 INFO org.apache.hadoop.mapreduce.Job: Job job_local1414308832_0001 failed with state FAILED due to: NA
-2017-02-18 07:06:38,139 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-	File System Counters
-		FILE: Number of bytes read=151156389
-		FILE: Number of bytes written=154418301
-		FILE: Number of read operations=0
-		FILE: Number of large read operations=0
-		FILE: Number of write operations=0
-	Map-Reduce Framework
-		Map input records=80476
-		Map output records=793081
-		Map output bytes=7541511
-		Map output materialized bytes=9127733
-		Input split bytes=118
-		Combine input records=0
-		Combine output records=0
-		Reduce input groups=41775
-		Reduce shuffle bytes=9127733
-		Reduce input records=793081
-		Reduce output records=26
-		Spilled Records=1586162
-		Shuffled Maps =10
-		Failed Shuffles=0
-		Merged Map outputs=10
-		GC time elapsed (ms)=261
-		Total committed heap usage (bytes)=1821749248
-	Shuffle Errors
-		BAD_ID=0
-		CONNECTION=0
-		IO_ERROR=0
-		WRONG_LENGTH=0
-		WRONG_MAP=0
-		WRONG_REDUCE=0
-	File Input Format Counters
-		Bytes Read=4454050
-	File Output Format Counters
-		Bytes Written=344
-2017-02-18 07:08:55,332 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 07:08:57,528 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 07:08:57,561 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 07:08:59,544 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 07:08:59,596 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 07:09:00,022 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 07:09:01,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local484764157_0001
-2017-02-18 07:09:03,289 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 07:09:03,291 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local484764157_0001
-2017-02-18 07:09:03,305 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 07:09:03,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:09:03,371 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 07:09:03,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 07:09:03,812 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000000_0
-2017-02-18 07:09:04,062 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:09:04,166 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:09:04,195 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 07:09:04,295 INFO org.apache.hadoop.mapreduce.Job: Job job_local484764157_0001 running in uber mode : false
-2017-02-18 07:09:04,297 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 07:09:04,954 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:09:05,021 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:09:05,036 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:09:10,135 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:09:10,313 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
-2017-02-18 07:09:13,151 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:09:13,318 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
-2017-02-18 07:09:16,155 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:09:16,338 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
-2017-02-18 07:09:19,158 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:09:19,341 INFO org.apache.hadoop.mapreduce.Job: map 19% reduce 0%
-2017-02-18 07:09:20,631 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 07:09:22,161 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:22,348 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 07:09:25,165 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:28,169 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:31,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:34,171 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:37,172 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:40,174 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:43,175 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:46,176 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:09:53,796 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:09:53,857 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 07:09:53,860 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:09:53,863 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000000_0' done.
-2017-02-18 07:09:53,878 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000000_0
-2017-02-18 07:09:53,878 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000001_0
-2017-02-18 07:09:53,883 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:09:53,884 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:09:53,886 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 07:09:54,297 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:09:54,307 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:09:54,308 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:09:54,308 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:09:54,309 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:09:54,314 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:09:54,333 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:09:54,416 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:09:59,653 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 07:09:59,920 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:00,425 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 07:10:02,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:05,933 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:08,938 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:09,893 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:10:09,926 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 07:10:09,933 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:10:09,938 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000001_0' done.
-2017-02-18 07:10:09,942 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:09,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:09,948 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:09,949 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:09,971 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 07:10:10,352 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:10:10,354 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:10:10,358 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:10:10,359 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:10:10,360 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:10:10,365 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:10:10,374 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:10:10,453 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:10:14,114 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 07:10:14,460 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 07:10:15,991 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:16,468 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 07:10:19,001 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:21,961 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:10:22,003 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:10:22,016 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 07:10:22,028 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:10:22,036 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000002_0' done.
-2017-02-18 07:10:22,037 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:22,038 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 07:10:22,146 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 07:10:22,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000000_0
-2017-02-18 07:10:22,220 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:22,221 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:22,258 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@194e2c2f
-2017-02-18 07:10:22,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:22,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:22,488 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:10:22,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 878989 len: 878993 to MEMORY
-2017-02-18 07:10:22,821 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:22,851 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989
-2017-02-18 07:10:22,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2462807 len: 2462811 to MEMORY
-2017-02-18 07:10:22,956 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2462807 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:22,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2462807, inMemoryMapOutputs.size() -> 2, commitMemory -> 878989, usedMemory ->3341796
-2017-02-18 07:10:22,981 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 864040 len: 864044 to MEMORY
-2017-02-18 07:10:22,986 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 864040 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:23,008 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 864040, inMemoryMapOutputs.size() -> 3, commitMemory -> 3341796, usedMemory ->4205836
-2017-02-18 07:10:23,009 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:23,010 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:23,010 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:23,066 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:23,068 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4205810 bytes
-2017-02-18 07:10:25,129 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4205836 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:25,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4205836 bytes from disk
-2017-02-18 07:10:25,142 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:25,144 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:25,145 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4205821 bytes
-2017-02-18 07:10:25,150 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:25,239 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 07:10:27,819 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 07:10:27,845 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:27,846 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000000_0 is allowed to commit now
-2017-02-18 07:10:27,847 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000000
-2017-02-18 07:10:27,860 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000000_0' done.
-2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000000_0
-2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000001_0
-2017-02-18 07:10:27,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:27,880 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:27,881 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@731e0de
-2017-02-18 07:10:27,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:27,917 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:27,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1021001 len: 1021005 to MEMORY
-2017-02-18 07:10:27,961 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:27,979 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001
-2017-02-18 07:10:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3748098 len: 3748102 to MEMORY
-2017-02-18 07:10:28,110 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3748098 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:28,117 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3748098, inMemoryMapOutputs.size() -> 2, commitMemory -> 1021001, usedMemory ->4769099
-2017-02-18 07:10:28,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 2021094 len: 2021098 to MEMORY
-2017-02-18 07:10:28,163 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2021094 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:28,177 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2021094, inMemoryMapOutputs.size() -> 3, commitMemory -> 4769099, usedMemory ->6790193
-2017-02-18 07:10:28,183 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:28,184 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:28,184 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:28,187 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:28,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6790184 bytes
-2017-02-18 07:10:28,504 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
-2017-02-18 07:10:30,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6790193 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6790193 bytes from disk
-2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:30,801 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6790186 bytes
-2017-02-18 07:10:30,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:33,873 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 07:10:33,884 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:33,890 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:33,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000001_0 is allowed to commit now
-2017-02-18 07:10:33,902 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000001
-2017-02-18 07:10:33,905 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:33,912 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000001_0' done.
-2017-02-18 07:10:33,913 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000001_0
-2017-02-18 07:10:33,914 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000002_0
-2017-02-18 07:10:33,931 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:33,932 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:33,933 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f08370b
-2017-02-18 07:10:33,950 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:33,969 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:33,975 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1216330 len: 1216334 to MEMORY
-2017-02-18 07:10:33,998 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:34,013 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330
-2017-02-18 07:10:34,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 4957197 len: 4957201 to MEMORY
-2017-02-18 07:10:34,165 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4957197 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:34,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4957197, inMemoryMapOutputs.size() -> 2, commitMemory -> 1216330, usedMemory ->6173527
-2017-02-18 07:10:34,185 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1268271 len: 1268275 to MEMORY
-2017-02-18 07:10:34,192 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1268271 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:34,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1268271, inMemoryMapOutputs.size() -> 3, commitMemory -> 6173527, usedMemory ->7441798
-2017-02-18 07:10:34,211 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:34,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:34,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:34,217 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:34,217 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7441769 bytes
-2017-02-18 07:10:34,519 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
-2017-02-18 07:10:36,887 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7441798 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7441798 bytes from disk
-2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:36,890 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7441780 bytes
-2017-02-18 07:10:36,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:39,695 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 07:10:39,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:39,733 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000002_0 is allowed to commit now
-2017-02-18 07:10:39,735 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000002
-2017-02-18 07:10:39,740 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:39,746 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000002_0' done.
-2017-02-18 07:10:39,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000002_0
-2017-02-18 07:10:39,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000003_0
-2017-02-18 07:10:39,758 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:39,759 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:39,759 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7655ff39
-2017-02-18 07:10:39,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:39,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:39,804 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 686549 len: 686553 to MEMORY
-2017-02-18 07:10:39,814 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:39,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549
-2017-02-18 07:10:39,865 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2553004 len: 2553008 to MEMORY
-2017-02-18 07:10:39,958 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2553004 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2553004, inMemoryMapOutputs.size() -> 2, commitMemory -> 686549, usedMemory ->3239553
-2017-02-18 07:10:39,967 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 947339 len: 947343 to MEMORY
-2017-02-18 07:10:39,987 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 947339 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:40,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 947339, inMemoryMapOutputs.size() -> 3, commitMemory -> 3239553, usedMemory ->4186892
-2017-02-18 07:10:40,002 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:40,003 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:40,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:40,005 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:40,005 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4186872 bytes
-2017-02-18 07:10:40,541 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
-2017-02-18 07:10:41,489 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4186892 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:41,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4186892 bytes from disk
-2017-02-18 07:10:41,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4186881 bytes
-2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:42,943 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 07:10:42,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:42,965 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000003_0 is allowed to commit now
-2017-02-18 07:10:42,966 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000003
-2017-02-18 07:10:42,976 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:42,978 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000003_0' done.
-2017-02-18 07:10:42,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000003_0
-2017-02-18 07:10:42,979 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000004_0
-2017-02-18 07:10:42,992 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:42,993 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:42,994 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@54babeeb
-2017-02-18 07:10:42,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:43,031 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:43,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 853557 len: 853561 to MEMORY
-2017-02-18 07:10:43,074 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:43,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557
-2017-02-18 07:10:43,108 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3061786 len: 3061790 to MEMORY
-2017-02-18 07:10:43,212 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3061786 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:43,212 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3061786, inMemoryMapOutputs.size() -> 2, commitMemory -> 853557, usedMemory ->3915343
-2017-02-18 07:10:43,220 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1002557 len: 1002561 to MEMORY
-2017-02-18 07:10:43,238 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1002557 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:43,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1002557, inMemoryMapOutputs.size() -> 3, commitMemory -> 3915343, usedMemory ->4917900
-2017-02-18 07:10:43,257 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:43,258 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:43,258 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:43,260 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:43,260 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4917879 bytes
-2017-02-18 07:10:43,583 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 07:10:45,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4917900 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4917900 bytes from disk
-2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:45,167 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4917892 bytes
-2017-02-18 07:10:45,167 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:46,884 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 07:10:46,899 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:46,903 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000004_0 is allowed to commit now
-2017-02-18 07:10:46,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000004
-2017-02-18 07:10:46,911 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:46,917 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000004_0' done.
-2017-02-18 07:10:46,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000004_0
-2017-02-18 07:10:46,919 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000005_0
-2017-02-18 07:10:46,930 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:46,931 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:46,931 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1808ffef
-2017-02-18 07:10:46,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:46,967 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:46,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 767280 len: 767284 to MEMORY
-2017-02-18 07:10:46,991 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:47,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280
-2017-02-18 07:10:47,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2671328 len: 2671332 to MEMORY
-2017-02-18 07:10:47,079 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2671328 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:47,110 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2671328, inMemoryMapOutputs.size() -> 2, commitMemory -> 767280, usedMemory ->3438608
-2017-02-18 07:10:47,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 894060 len: 894064 to MEMORY
-2017-02-18 07:10:47,132 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 894060 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:47,146 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 894060, inMemoryMapOutputs.size() -> 3, commitMemory -> 3438608, usedMemory ->4332668
-2017-02-18 07:10:47,148 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:47,149 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:47,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:47,150 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:47,151 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4332653 bytes
-2017-02-18 07:10:47,595 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
-2017-02-18 07:10:48,614 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4332668 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:48,616 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4332668 bytes from disk
-2017-02-18 07:10:48,618 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:48,618 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:48,619 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4332659 bytes
-2017-02-18 07:10:48,619 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:50,071 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 07:10:50,076 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:50,076 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000005_0 is allowed to commit now
-2017-02-18 07:10:50,077 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000005
-2017-02-18 07:10:50,088 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:50,090 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000005_0' done.
-2017-02-18 07:10:50,091 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000005_0
-2017-02-18 07:10:50,092 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000006_0
-2017-02-18 07:10:50,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:50,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:50,110 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@28ae5b0b
-2017-02-18 07:10:50,116 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:50,138 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:50,153 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 967027 len: 967031 to MEMORY
-2017-02-18 07:10:50,160 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:50,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027
-2017-02-18 07:10:50,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3388337 len: 3388341 to MEMORY
-2017-02-18 07:10:50,286 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3388337 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:50,291 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3388337, inMemoryMapOutputs.size() -> 2, commitMemory -> 967027, usedMemory ->4355364
-2017-02-18 07:10:50,305 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1152263 len: 1152267 to MEMORY
-2017-02-18 07:10:50,321 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1152263 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:50,330 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1152263, inMemoryMapOutputs.size() -> 3, commitMemory -> 4355364, usedMemory ->5507627
-2017-02-18 07:10:50,332 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:50,334 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:50,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:50,336 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:50,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5507601 bytes
-2017-02-18 07:10:50,602 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
-2017-02-18 07:10:52,360 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5507627 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:52,362 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5507627 bytes from disk
-2017-02-18 07:10:52,362 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:52,368 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:52,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5507615 bytes
-2017-02-18 07:10:52,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:54,311 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 07:10:54,331 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:54,336 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000006_0 is allowed to commit now
-2017-02-18 07:10:54,338 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000006
-2017-02-18 07:10:54,343 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:54,347 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000006_0' done.
-2017-02-18 07:10:54,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000006_0
-2017-02-18 07:10:54,349 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000007_0
-2017-02-18 07:10:54,357 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:54,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:54,362 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68792330
-2017-02-18 07:10:54,374 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:54,401 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:54,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 710901 len: 710905 to MEMORY
-2017-02-18 07:10:54,438 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:54,438 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901
-2017-02-18 07:10:54,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2633768 len: 2633772 to MEMORY
-2017-02-18 07:10:54,521 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2633768 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:54,551 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2633768, inMemoryMapOutputs.size() -> 2, commitMemory -> 710901, usedMemory ->3344669
-2017-02-18 07:10:54,554 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 824970 len: 824974 to MEMORY
-2017-02-18 07:10:54,574 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 824970 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:54,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 824970, inMemoryMapOutputs.size() -> 3, commitMemory -> 3344669, usedMemory ->4169639
-2017-02-18 07:10:54,586 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:54,587 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:54,587 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:54,589 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:54,589 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4169616 bytes
-2017-02-18 07:10:54,634 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
-2017-02-18 07:10:55,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4169639 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:10:55,947 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4169639 bytes from disk
-2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4169630 bytes
-2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:57,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 07:10:57,350 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:57,358 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000007_0 is allowed to commit now
-2017-02-18 07:10:57,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000007
-2017-02-18 07:10:57,365 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:10:57,367 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000007_0' done.
-2017-02-18 07:10:57,368 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000007_0
-2017-02-18 07:10:57,374 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000008_0
-2017-02-18 07:10:57,379 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:10:57,380 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:10:57,381 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@32609547
-2017-02-18 07:10:57,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:10:57,404 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:10:57,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1462439 len: 1462443 to MEMORY
-2017-02-18 07:10:57,436 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:10:57,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439
-2017-02-18 07:10:57,479 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 5237666 len: 5237670 to MEMORY
-2017-02-18 07:10:57,645 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:10:57,648 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5237666 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:10:57,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5237666, inMemoryMapOutputs.size() -> 2, commitMemory -> 1462439, usedMemory ->6700105
-2017-02-18 07:10:57,651 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1510887 len: 1510891 to MEMORY
-2017-02-18 07:10:57,665 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1510887 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:10:57,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1510887, inMemoryMapOutputs.size() -> 3, commitMemory -> 6700105, usedMemory ->8210992
-2017-02-18 07:10:57,694 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:10:57,695 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:10:57,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:10:57,703 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:10:57,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8210965 bytes
-2017-02-18 07:10:58,647 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
-2017-02-18 07:11:00,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8210992 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8210992 bytes from disk
-2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8210983 bytes
-2017-02-18 07:11:00,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:11:03,386 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:11:03,659 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
-2017-02-18 07:11:03,801 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 07:11:03,811 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:11:03,817 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000008_0 is allowed to commit now
-2017-02-18 07:11:03,820 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000008
-2017-02-18 07:11:03,830 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:11:03,833 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000008_0' done.
-2017-02-18 07:11:03,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000008_0
-2017-02-18 07:11:03,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000009_0
-2017-02-18 07:11:03,848 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:11:03,862 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:11:03,875 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@9b82279
-2017-02-18 07:11:03,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:11:03,896 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:11:03,914 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 563620 len: 563624 to MEMORY
-2017-02-18 07:11:03,929 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local484764157_0001_m_000002_0
-2017-02-18 07:11:03,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620
-2017-02-18 07:11:03,969 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 1935947 len: 1935951 to MEMORY
-2017-02-18 07:11:04,044 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1935947 bytes from map-output for attempt_local484764157_0001_m_000000_0
-2017-02-18 07:11:04,047 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1935947, inMemoryMapOutputs.size() -> 2, commitMemory -> 563620, usedMemory ->2499567
-2017-02-18 07:11:04,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 733075 len: 733079 to MEMORY
-2017-02-18 07:11:04,076 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 733075 bytes from map-output for attempt_local484764157_0001_m_000001_0
-2017-02-18 07:11:04,089 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 733075, inMemoryMapOutputs.size() -> 3, commitMemory -> 2499567, usedMemory ->3232642
-2017-02-18 07:11:04,092 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:11:04,093 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:11:04,093 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:11:04,095 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:11:04,095 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3232620 bytes
-2017-02-18 07:11:05,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3232642 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3232642 bytes from disk
-2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:11:05,093 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3232631 bytes
-2017-02-18 07:11:05,093 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:11:06,076 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 07:11:06,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:11:06,117 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000009_0 is allowed to commit now
-2017-02-18 07:11:06,118 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000009
-2017-02-18 07:11:06,128 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:11:06,131 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000009_0' done.
-2017-02-18 07:11:06,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000009_0
-2017-02-18 07:11:06,133 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 07:11:06,356 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local484764157_0001
-java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
-Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
-	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
-	at java.security.AccessController.doPrivileged(Native Method)
-	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
-	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
-	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
-	... 1 more
-2017-02-18 07:11:06,670 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 07:11:06,670 INFO org.apache.hadoop.mapreduce.Job: Job job_local484764157_0001 failed with state FAILED due to: NA
-2017-02-18 07:11:06,936 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
-	File System Counters
-		FILE: Number of bytes read=919728132
-		FILE: Number of bytes written=960458626
-		FILE: Number of read operations=0
-		FILE: Number of large read operations=0
-		FILE: Number of write operations=0
-	Map-Reduce Framework
-		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=52996307
-		Input split bytes=351
-		Combine input records=0
-		Combine output records=0
-		Reduce input groups=195336
-		Reduce shuffle bytes=52996307
-		Reduce input records=4678719
-		Reduce output records=119
-		Spilled Records=9357438
-		Shuffled Maps =30
-		Failed Shuffles=0
-		Merged Map outputs=30
-		GC time elapsed (ms)=871
-		Total committed heap usage (bytes)=2551959552
-	Shuffle Errors
-		BAD_ID=0
-		CONNECTION=0
-		IO_ERROR=0
-		WRONG_LENGTH=0
-		WRONG_MAP=0
-		WRONG_REDUCE=0
-	File Input Format Counters
-		Bytes Read=26057874
-	File Output Format Counters
-		Bytes Written=1297
-2017-02-18 07:35:20,526 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 07:35:22,634 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 07:35:22,642 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 07:35:24,655 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 07:35:24,723 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 07:35:25,182 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 07:35:26,701 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local378226183_0001
-2017-02-18 07:35:28,571 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 07:35:28,574 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local378226183_0001
-2017-02-18 07:35:28,595 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 07:35:28,670 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:35:28,679 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 07:35:29,112 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 07:35:29,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000000_0
-2017-02-18 07:35:29,370 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:35:29,500 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:35:29,531 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 07:35:29,577 INFO org.apache.hadoop.mapreduce.Job: Job job_local378226183_0001 running in uber mode : false
-2017-02-18 07:35:29,611 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 07:35:30,918 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:35:31,506 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:35:31,939 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:35:35,765 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:35:36,663 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
-2017-02-18 07:35:38,773 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:35:39,670 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
-2017-02-18 07:35:41,776 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:35:42,686 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0%
-2017-02-18 07:35:44,777 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:35:45,696 INFO org.apache.hadoop.mapreduce.Job: map 16% reduce 0%
-2017-02-18 07:35:47,695 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:35:47,699 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:35:47,705 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:35:47,707 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 07:35:47,707 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 07:35:47,779 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:35:48,732 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 07:35:50,780 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:35:53,782 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:35:56,784 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:35:59,785 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:02,787 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:05,789 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:08,790 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:11,792 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:15,221 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:36:15,301 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 07:36:15,311 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:36:15,316 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000000_0' done.
-2017-02-18 07:36:15,318 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:15,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:15,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:15,329 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:15,356 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 07:36:15,807 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:36:15,812 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:36:15,815 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:36:15,815 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:36:15,816 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:36:15,816 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:36:15,829 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:36:15,848 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:36:21,514 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:36:21,614 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 07:36:21,614 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 07:36:21,838 INFO org.apache.hadoop.mapreduce.Job: map 55% reduce 0%
-2017-02-18 07:36:24,614 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:24,850 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 07:36:27,615 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:29,379 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:36:29,393 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 07:36:29,402 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:36:29,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000001_0' done.
-2017-02-18 07:36:29,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:29,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:29,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:29,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:29,442 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 07:36:29,839 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 07:36:29,853 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 07:36:29,853 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 07:36:29,854 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 07:36:29,854 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 07:36:29,859 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 07:36:29,862 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:36:29,871 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 07:36:32,938 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 07:36:32,946 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 07:36:32,947 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 07:36:32,948 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 07:36:32,948 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 07:36:33,932 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 07:36:35,448 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:35,934 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 07:36:38,458 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 07:36:38,834 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 07:36:38,877 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 07:36:38,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 07:36:38,889 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000002_0' done.
-2017-02-18 07:36:38,890 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:38,897 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 07:36:38,945 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 07:36:39,045 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 07:36:39,046 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000000_0
-2017-02-18 07:36:39,127 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:39,128 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:39,171 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@712678ab
-2017-02-18 07:36:39,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:39,396 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:39,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 07:36:39,804 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:39,822 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61
-2017-02-18 07:36:39,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 30 len: 34 to MEMORY
-2017-02-18 07:36:39,858 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:39,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->91
-2017-02-18 07:36:39,868 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 07:36:39,875 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:39,879 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 91, usedMemory ->122
-2017-02-18 07:36:39,882 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:39,883 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:39,883 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:39,948 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:39,950 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes
-2017-02-18 07:36:39,958 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 122 bytes from disk
-2017-02-18 07:36:39,984 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:39,984 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:39,985 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes
-2017-02-18 07:36:39,992 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,087 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 07:36:40,097 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 07:36:40,107 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,115 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000000_0 is allowed to commit now
-2017-02-18 07:36:40,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000000
-2017-02-18 07:36:40,127 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:40,129 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000000_0' done.
-2017-02-18 07:36:40,130 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000000_0
-2017-02-18 07:36:40,131 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000001_0
-2017-02-18 07:36:40,142 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:40,143 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:40,143 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b362c68
-2017-02-18 07:36:40,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:40,174 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 07:36:40,194 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:40,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154
-2017-02-18 07:36:40,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 69 len: 73 to MEMORY
-2017-02-18 07:36:40,227 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:40,233 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->223
-2017-02-18 07:36:40,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 07:36:40,250 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:40,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 3, commitMemory -> 223, usedMemory ->262
-2017-02-18 07:36:40,251 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:40,260 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,261 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:40,262 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:40,262 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes
-2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 262 bytes from disk
-2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:40,265 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes
-2017-02-18 07:36:40,272 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,327 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 07:36:40,330 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,333 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000001_0 is allowed to commit now
-2017-02-18 07:36:40,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000001
-2017-02-18 07:36:40,355 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:40,356 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000001_0' done.
-2017-02-18 07:36:40,361 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000001_0
-2017-02-18 07:36:40,362 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000002_0
-2017-02-18 07:36:40,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:40,375 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:40,375 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20bae047
-2017-02-18 07:36:40,383 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:40,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:40,429 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 07:36:40,430 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:40,435 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112
-2017-02-18 07:36:40,439 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 41 len: 45 to MEMORY
-2017-02-18 07:36:40,450 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:40,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->153
-2017-02-18 07:36:40,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 07:36:40,476 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 3, commitMemory -> 153, usedMemory ->204
-2017-02-18 07:36:40,482 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:40,483 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:40,485 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:40,485 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes
-2017-02-18 07:36:40,489 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 204 bytes from disk
-2017-02-18 07:36:40,497 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:40,498 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:40,499 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes
-2017-02-18 07:36:40,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,560 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 07:36:40,562 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,562 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000002_0 is allowed to commit now
-2017-02-18 07:36:40,563 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000002
-2017-02-18 07:36:40,578 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000002_0' done.
-2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000002_0
-2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000003_0
-2017-02-18 07:36:40,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:40,601 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:40,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4e12001b
-2017-02-18 07:36:40,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:40,643 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 07:36:40,655 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:40,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63
-2017-02-18 07:36:40,674 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 07:36:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74
-2017-02-18 07:36:40,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 07:36:40,707 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:40,707 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85
-2017-02-18 07:36:40,708 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:40,708 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,709 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:40,712 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:40,712 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
-2017-02-18 07:36:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
-2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes
-2017-02-18 07:36:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 07:36:40,778 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000003_0 is allowed to commit now
-2017-02-18 07:36:40,779 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000003
-2017-02-18 07:36:40,798 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000003_0' done.
-2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000003_0
-2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000004_0
-2017-02-18 07:36:40,825 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:40,826 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:40,826 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@899cd30
-2017-02-18 07:36:40,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:40,859 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:40,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 07:36:40,880 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:40,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52
-2017-02-18 07:36:40,889 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 33 len: 37 to MEMORY
-2017-02-18 07:36:40,898 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:40,915 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->85
-2017-02-18 07:36:40,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 07:36:40,928 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:40,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 85, usedMemory ->116
-2017-02-18 07:36:40,940 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:40,941 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:40,941 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:40,943 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:40,944 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes
-2017-02-18 07:36:40,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:40,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk
-2017-02-18 07:36:40,953 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes
-2017-02-18 07:36:40,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:41,049 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 07:36:41,051 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:41,051 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000004_0 is allowed to commit now
-2017-02-18 07:36:41,052 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000004
-2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000004_0' done.
-2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000004_0
-2017-02-18 07:36:41,054 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000005_0
-2017-02-18 07:36:41,077 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 07:36:41,078 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 07:36:41,087 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@76dab03c
-2017-02-18 07:36:41,098 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 07:36:41,164 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 07:36:41,185 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
-2017-02-18 07:36:41,189 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local378226183_0001_m_000000_0
-2017-02-18 07:36:41,192 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54
-2017-02-18 07:36:41,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 07:36:41,207 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local378226183_0001_m_000001_0
-2017-02-18 07:36:41,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77
-2017-02-18 07:36:41,223 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 07:36:41,236 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local378226183_0001_m_000002_0
-2017-02-18 07:36:41,239 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100
-2017-02-18 07:36:41,241 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 07:36:41,242 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:41,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 07:36:41,245 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 07:36:41,245 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes
-2017-02-18 07:36:41,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit
-2017-02-18 07:36:41,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 100 bytes from disk
-2017-02-18 07:36:41,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 07:36:41,253 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 07:36:41,254 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes
-2017-02-18 07:36:41,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:41,313 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 07:36:41,315 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 07:36:41,316 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000005_0 is allowed to commit now
-2017-02-18 07:36:41,317 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000005
-2017-02-18 07:36:41,324 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 07:36:41,331 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000005_0' done.
-2017-02-18 07:36:41,333 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000005_0 -2017-02-18 07:36:41,333 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000006_0 -2017-02-18 07:36:41,357 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:36:41,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:36:41,359 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@14bddbc2 -2017-02-18 07:36:41,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:36:41,395 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:36:41,408 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 07:36:41,409 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local378226183_0001_m_000000_0 -2017-02-18 07:36:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 -2017-02-18 07:36:41,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 28 len: 32 to MEMORY -2017-02-18 07:36:41,429 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local378226183_0001_m_000001_0 -2017-02-18 07:36:41,432 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 -2017-02-18 07:36:41,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 28 len: 32 to MEMORY -2017-02-18 07:36:41,460 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local378226183_0001_m_000002_0 -2017-02-18 07:36:41,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 -2017-02-18 07:36:41,461 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:36:41,462 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:36:41,462 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:36:41,463 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:36:41,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes -2017-02-18 07:36:41,469 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit -2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk -2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:36:41,471 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes -2017-02-18 07:36:41,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000006_0 is done. And is in the process of committing -2017-02-18 07:36:41,529 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,542 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000006_0 is allowed to commit now -2017-02-18 07:36:41,544 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000006 -2017-02-18 07:36:41,548 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:36:41,550 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000006_0' done. 
-2017-02-18 07:36:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000006_0 -2017-02-18 07:36:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000007_0 -2017-02-18 07:36:41,560 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:36:41,561 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:36:41,566 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@709a7571 -2017-02-18 07:36:41,587 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:36:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:36:41,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 07:36:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local378226183_0001_m_000000_0 -2017-02-18 07:36:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 -2017-02-18 07:36:41,640 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 11 len: 15 to MEMORY -2017-02-18 07:36:41,651 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000001_0 -2017-02-18 07:36:41,654 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->99 -2017-02-18 07:36:41,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 2 len: 6 to MEMORY -2017-02-18 07:36:41,671 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local378226183_0001_m_000002_0 -2017-02-18 07:36:41,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 99, usedMemory ->101 -2017-02-18 07:36:41,695 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:36:41,696 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:36:41,696 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:36:41,699 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:36:41,699 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes -2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit -2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk -2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:36:41,701 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes -2017-02-18 07:36:41,701 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000007_0 is done. And is in the process of committing -2017-02-18 07:36:41,763 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,763 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000007_0 is allowed to commit now -2017-02-18 07:36:41,764 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000007 -2017-02-18 07:36:41,769 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:36:41,774 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000007_0' done. 
-2017-02-18 07:36:41,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000007_0 -2017-02-18 07:36:41,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000008_0 -2017-02-18 07:36:41,790 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:36:41,791 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:36:41,791 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6944e8c1 -2017-02-18 07:36:41,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:36:41,826 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:36:41,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 07:36:41,852 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local378226183_0001_m_000000_0 -2017-02-18 07:36:41,852 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 -2017-02-18 07:36:41,861 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 49 len: 53 to MEMORY -2017-02-18 07:36:41,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local378226183_0001_m_000001_0 -2017-02-18 07:36:41,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 -2017-02-18 07:36:41,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 49 len: 53 to MEMORY -2017-02-18 07:36:41,890 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local378226183_0001_m_000002_0 -2017-02-18 07:36:41,892 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 -2017-02-18 07:36:41,903 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:36:41,904 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:36:41,904 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:36:41,905 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:36:41,905 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes -2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit -2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk -2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:36:41,918 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:36:41,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes -2017-02-18 07:36:41,934 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,962 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80% -2017-02-18 07:36:41,982 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000008_0 is done. And is in the process of committing -2017-02-18 07:36:41,984 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:41,984 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000008_0 is allowed to commit now -2017-02-18 07:36:41,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000008 -2017-02-18 07:36:41,995 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:36:41,999 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000008_0' done. 
-2017-02-18 07:36:42,002 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000008_0 -2017-02-18 07:36:42,004 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000009_0 -2017-02-18 07:36:42,011 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 07:36:42,034 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 07:36:42,035 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1419ce96 -2017-02-18 07:36:42,051 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 07:36:42,081 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 07:36:42,104 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 22 len: 26 to MEMORY -2017-02-18 07:36:42,118 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local378226183_0001_m_000000_0 -2017-02-18 07:36:42,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 -2017-02-18 07:36:42,133 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 13 len: 17 to MEMORY -2017-02-18 07:36:42,134 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local378226183_0001_m_000001_0 -2017-02-18 07:36:42,144 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->35 -2017-02-18 07:36:42,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 2 len: 6 to MEMORY -2017-02-18 07:36:42,164 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local378226183_0001_m_000002_0 -2017-02-18 07:36:42,165 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37 -2017-02-18 07:36:42,165 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 07:36:42,166 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 07:36:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 07:36:42,168 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 07:36:42,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes -2017-02-18 07:36:42,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit -2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 37 bytes from disk -2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes -2017-02-18 07:36:42,189 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:42,235 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000009_0 is done. And is in the process of committing -2017-02-18 07:36:42,237 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 07:36:42,237 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000009_0 is allowed to commit now -2017-02-18 07:36:42,238 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000009 -2017-02-18 07:36:42,243 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 07:36:42,245 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000009_0' done. -2017-02-18 07:36:42,245 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000009_0 -2017-02-18 07:36:42,246 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 07:36:42,466 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local378226183_0001 -java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573) -Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest - at java.net.URLClassLoader$1.run(URLClassLoader.java:366) - at java.net.URLClassLoader$1.run(URLClassLoader.java:355) - at java.security.AccessController.doPrivileged(Native Method) - at java.net.URLClassLoader.findClass(URLClassLoader.java:354) - at java.lang.ClassLoader.loadClass(ClassLoader.java:425) - at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) - at java.lang.ClassLoader.loadClass(ClassLoader.java:358) - ... 
1 more -2017-02-18 07:36:42,968 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 07:36:42,968 INFO org.apache.hadoop.mapreduce.Job: Job job_local378226183_0001 failed with state FAILED due to: NA -2017-02-18 07:36:43,236 INFO org.apache.hadoop.mapreduce.Job: Counters: 30 - File System Counters - FILE: Number of bytes read=324370280 - FILE: Number of bytes written=3632281 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=507535 - Map output records=4678719 - Map output bytes=43638689 - Map output materialized bytes=1471 - Input split bytes=351 - Combine input records=4678719 - Combine output records=131 - Reduce input groups=77 - Reduce shuffle bytes=1471 - Reduce input records=131 - Reduce output records=77 - Spilled Records=262 - Shuffled Maps =30 - Failed Shuffles=0 - Merged Map outputs=30 - GC time elapsed (ms)=1013 - Total committed heap usage (bytes)=2551959552 - Shuffle Errors - BAD_ID=0 - CONNECTION=0 - IO_ERROR=0 - WRONG_LENGTH=0 - WRONG_MAP=0 - WRONG_REDUCE=0 - File Input Format Counters - Bytes Read=26057874 - File Output Format Counters - Bytes Written=862 -2017-02-18 08:10:46,450 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 08:10:48,908 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 08:10:48,937 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 08:10:51,128 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
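The failure above is a classpath problem, not a logic bug: all ten reducers finished, but LocalJobRunner could not load org.apache.http.client.methods.HttpUriRequest (provided by Apache HttpClient), and both runs also start with the "No job jar file set" warning that the log itself points at ("See Job or Job#setJar(String)"). The sketch below shows the usual two fixes. It is a hedged illustration, not code from this repository: the names Q2Driver/Q2Mapper/Q2Reducer are hypothetical, and the word-count-style body is only a stand-in for the assignment's actual job.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    // Hypothetical driver sketch (names are illustrative, not from this repo).
    public class Q2Driver extends Configured implements Tool {

        public static class Q2Mapper extends Mapper<LongWritable, Text, Text, IntWritable> {
            private static final IntWritable ONE = new IntWritable(1);
            private final Text word = new Text();

            @Override
            protected void map(LongWritable key, Text value, Context ctx)
                    throws IOException, InterruptedException {
                // Emit (token, 1) for each whitespace-separated token.
                for (String tok : value.toString().split("\\s+")) {
                    if (!tok.isEmpty()) { word.set(tok); ctx.write(word, ONE); }
                }
            }
        }

        public static class Q2Reducer extends Reducer<Text, IntWritable, Text, IntWritable> {
            @Override
            protected void reduce(Text key, Iterable<IntWritable> vals, Context ctx)
                    throws IOException, InterruptedException {
                int sum = 0;
                for (IntWritable v : vals) sum += v.get();
                ctx.write(key, new IntWritable(sum));
            }
        }

        @Override
        public int run(String[] args) throws Exception {
            Job job = Job.getInstance(getConf(), "Q2");
            job.setJarByClass(Q2Driver.class);     // packages user classes; silences "No job jar file set"
            job.setMapperClass(Q2Mapper.class);
            job.setCombinerClass(Q2Reducer.class); // the failed job's counters show a combiner ran
            job.setReducerClass(Q2Reducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            return job.waitForCompletion(true) ? 0 : 1;
        }

        public static void main(String[] args) throws Exception {
            // ToolRunner/GenericOptionsParser handles generic options such as -libjars.
            System.exit(ToolRunner.run(new Q2Driver(), args));
        }
    }

A hypothetical invocation that also ships the missing dependency alongside the job jar (jar names illustrative): hadoop jar assign1.jar Q2Driver -libjars httpclient.jar <input> <output>. The log that follows is the rerun, now over all three Gutenberg texts.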
-2017-02-18 08:10:51,224 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 08:10:51,664 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 08:10:53,086 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2083140530_0001
-2017-02-18 08:10:54,977 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 08:10:54,988 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2083140530_0001
-2017-02-18 08:10:54,999 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 08:10:55,060 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:10:55,072 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 08:10:55,531 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 08:10:55,533 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:10:55,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:10:55,921 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:10:55,926 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 08:10:55,999 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 running in uber mode : false
-2017-02-18 08:10:56,004 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:10:57,929 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:10:58,049 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:11:01,985 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:02,018 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
-2017-02-18 08:11:04,989 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:05,027 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
-2017-02-18 08:11:07,994 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:08,039 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0%
-2017-02-18 08:11:10,996 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:11,048 INFO org.apache.hadoop.mapreduce.Job: map 16% reduce 0%
-2017-02-18 08:11:14,000 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:14,037 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:11:14,044 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:11:14,045 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 08:11:14,052 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 08:11:17,004 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:20,010 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:23,012 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:26,014 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:29,016 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:32,024 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:35,034 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:38,040 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:40,745 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:11:40,810 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 08:11:40,824 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:11:40,827 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000000_0' done.
-2017-02-18 08:11:40,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:11:40,834 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:11:40,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:11:40,841 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:11:40,861 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 08:11:41,154 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:11:41,280 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:11:41,288 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:11:41,289 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:11:41,298 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:11:41,312 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:11:45,711 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 08:11:45,730 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:11:45,731 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:11:45,732 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 08:11:45,733 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 08:11:46,174 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 08:11:46,865 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:47,176 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 08:11:49,867 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:52,868 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:11:53,319 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:11:53,334 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 08:11:53,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:11:53,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000001_0' done.
-2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:11:53,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:11:53,364 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:11:53,381 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 08:11:53,771 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:11:53,780 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:11:53,781 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:11:53,795 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:11:53,802 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:11:54,199 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:11:56,809 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 08:11:56,821 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:11:56,822 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 08:11:57,211 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 08:11:59,395 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:12:00,220 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 08:12:02,396 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:12:02,603 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:12:02,632 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000002_0' done.
-2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:02,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
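Each map task above reports the same buffer geometry: mapreduce.task.io.sort.mb: 100, bufvoid = 104857600 (100 MB), and a soft limit of 83886080 bytes, which is exactly 80% of the buffer (104857600 x 0.8). Every task finishes with a single "Finished spill 0", so each map's output fits in one spill and no multi-pass merge is needed. A hedged sketch of the standard MRv2 properties behind those numbers follows; the values are the ones visible in the log, shown only to name the knobs, not a recommendation for this job.

    import org.apache.hadoop.conf.Configuration;

    // Hedged sketch: map-side sort-buffer knobs matching the figures logged above.
    public class MapSortTuning {
        public static Configuration defaults() {
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.task.io.sort.mb", 100);            // sort buffer size in MB, as logged
            conf.setFloat("mapreduce.map.sort.spill.percent", 0.80f); // spill threshold -> "soft limit at 83886080"
            conf.setInt("mapreduce.task.io.sort.factor", 10);         // merge fan-in, logged as ioSortFactor=10
            return conf;
        }
    }

Raising io.sort.mb only pays off when a map spills more than once; the single-spill pattern here means the defaults are already adequate.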
-2017-02-18 08:12:02,808 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 08:12:02,815 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000000_0
-2017-02-18 08:12:02,925 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:02,926 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:02,964 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@79758369
-2017-02-18 08:12:03,119 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:03,152 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:03,245 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:12:03,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:12:03,505 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:03,516 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61
-2017-02-18 08:12:03,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 08:12:03,556 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:03,557 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->92
-2017-02-18 08:12:03,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 30 len: 34 to MEMORY
-2017-02-18 08:12:03,575 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:03,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 3, commitMemory -> 92, usedMemory ->122
-2017-02-18 08:12:03,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:03,649 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:03,650 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes
-2017-02-18 08:12:03,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:03,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 122 bytes from disk
-2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:03,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes
-2017-02-18 08:12:03,688 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:03,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000001_0
-2017-02-18 08:12:03,810 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5b4e1b74
-2017-02-18 08:12:03,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:03,849 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:03,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:12:03,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:03,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154
-2017-02-18 08:12:03,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 08:12:03,880 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:03,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->193
-2017-02-18 08:12:03,889 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 69 len: 73 to MEMORY
-2017-02-18 08:12:03,903 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:03,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 3, commitMemory -> 193, usedMemory ->262
-2017-02-18 08:12:03,911 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes
-2017-02-18 08:12:03,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:03,918 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 262 bytes from disk
-2017-02-18 08:12:03,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:03,927 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:03,928 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes
-2017-02-18 08:12:03,930 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:03,986 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000002_0
-2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a6a1f5b
-2017-02-18 08:12:03,994 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,019 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,034 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:12:04,036 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,040 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112
-2017-02-18 08:12:04,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 08:12:04,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->163
-2017-02-18 08:12:04,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 41 len: 45 to MEMORY
-2017-02-18 08:12:04,087 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 3, commitMemory -> 163, usedMemory ->204
-2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes
-2017-02-18 08:12:04,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 204 bytes from disk
-2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes
-2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,151 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000003_0
-2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:04,169 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7934bf83
-2017-02-18 08:12:04,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:12:04,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,225 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63
-2017-02-18 08:12:04,227 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 08:12:04,232 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,241 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74
-2017-02-18 08:12:04,246 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 08:12:04,256 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85
-2017-02-18 08:12:04,261 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
-2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
-2017-02-18 08:12:04,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,283 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,285 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes
-2017-02-18 08:12:04,286 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,335 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000004_0
-2017-02-18 08:12:04,341 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4c122623
-2017-02-18 08:12:04,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52
-2017-02-18 08:12:04,394 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 08:12:04,407 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->83
-2017-02-18 08:12:04,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 33 len: 37 to MEMORY
-2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 3, commitMemory -> 83, usedMemory ->116
-2017-02-18 08:12:04,436 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes
-2017-02-18 08:12:04,445 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk
-2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes
-2017-02-18 08:12:04,454 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,498 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000005_0
-2017-02-18 08:12:04,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7adb5354
-2017-02-18 08:12:04,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,540 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,552 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
-2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54
-2017-02-18 08:12:04,576 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 08:12:04,581 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77
-2017-02-18 08:12:04,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 08:12:04,601 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100
-2017-02-18 08:12:04,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes
-2017-02-18 08:12:04,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 100 bytes from disk
-2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,620 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes
-2017-02-18 08:12:04,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
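Every reducer attempt repeats the same MergerManager line: memoryLimit=679778688, maxSingleShuffleLimit=169944672 (exactly 25% of the limit: 679778688 x 0.25 = 169944672), and mergeThreshold=448653952 (about 66% of it). These values are derived from the reducer heap and the shuffle fractions rather than set directly, so they are tuned through the fractions. A hedged sketch of the stock Hadoop 2.x properties follows; the fractions shown are the usual defaults consistent with the logged numbers, not values read from this job's configuration.

    import org.apache.hadoop.conf.Configuration;

    // Hedged sketch: reduce-side shuffle fractions behind the MergerManager numbers.
    public class ShuffleTuning {
        public static Configuration defaults() {
            Configuration conf = new Configuration();
            // memoryLimit is roughly (reducer heap) * this fraction (679778688 here).
            conf.setFloat("mapreduce.reduce.shuffle.input.buffer.percent", 0.70f);
            // Per-fetch cap: 679778688 * 0.25 = 169944672 = maxSingleShuffleLimit.
            conf.setFloat("mapreduce.reduce.shuffle.memory.limit.percent", 0.25f);
            // In-memory merge trigger: ~0.66 * memoryLimit = 448653952 = mergeThreshold.
            conf.setFloat("mapreduce.reduce.shuffle.merge.percent", 0.66f);
            return conf;
        }
    }

With map outputs of a few dozen bytes each, these limits are never approached here; the log continues with the remaining reducers below.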
-2017-02-18 08:12:04,663 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000006_0
-2017-02-18 08:12:04,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:04,685 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:04,686 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4ab6b2ab
-2017-02-18 08:12:04,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,706 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,719 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
-2017-02-18 08:12:04,724 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72
-2017-02-18 08:12:04,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100
-2017-02-18 08:12:04,762 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128
-2017-02-18 08:12:04,785 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,786 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,787 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes
-2017-02-18 08:12:04,794 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk
-2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,796 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes
-2017-02-18 08:12:04,808 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,868 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000007_0
-2017-02-18 08:12:04,873 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2b2083d9
-2017-02-18 08:12:04,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:04,894 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:04,916 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
-2017-02-18 08:12:04,921 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:04,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88
-2017-02-18 08:12:04,930 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 08:12:04,942 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:04,944 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->90
-2017-02-18 08:12:04,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 08:12:04,955 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:04,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 90, usedMemory ->101
-2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:04,974 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes
-2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk
-2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:04,986 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes
-2017-02-18 08:12:04,987 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:05,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000008_0
-2017-02-18 08:12:05,049 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df
-2017-02-18 08:12:05,055 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:05,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:05,085 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
-2017-02-18 08:12:05,096 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:05,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98
-2017-02-18 08:12:05,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 08:12:05,105 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:05,109 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147
-2017-02-18 08:12:05,112 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 08:12:05,135 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:05,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196
-2017-02-18 08:12:05,139 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:05,147 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:05,158 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes
-2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk
-2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes
-2017-02-18 08:12:05,162 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:05,228 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000009_0
-2017-02-18 08:12:05,240 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4066a7f5
-2017-02-18 08:12:05,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:12:05,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:12:05,281 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 22 len: 26 to MEMORY
-2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2083140530_0001_m_000000_0
-2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
-2017-02-18 08:12:05,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0
-2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24
-2017-02-18 08:12:05,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 13 len: 17 to MEMORY
-2017-02-18 08:12:05,339 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2083140530_0001_m_000001_0
-2017-02-18 08:12:05,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->37
-2017-02-18 08:12:05,346 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 08:12:05,349 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 08:12:05,350 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes
-2017-02-18 08:12:05,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
-2017-02-18 08:12:05,354 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 37 bytes from disk
-2017-02-18 08:12:05,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 08:12:05,359 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 08:12:05,360 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes
-2017-02-18 08:12:05,368 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 08:12:05,420 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 08:12:05,476 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2083140530_0001
-java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+2017-02-19 02:25:29,811 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 02:26:23,282 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 02:26:27,134 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-19 02:26:27,155 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-19 02:26:29,463 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-19 02:26:29,621 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-19 02:26:30,236 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-19 02:26:32,316 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local816132518_0001
+2017-02-19 02:26:34,621 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-19 02:26:34,622 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local816132518_0001
+2017-02-19 02:26:34,646 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-19 02:26:34,722 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:26:34,739 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-19 02:26:35,249 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-19 02:26:35,251 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local816132518_0001_m_000000_0
+2017-02-19 02:26:35,554 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:26:35,625 INFO org.apache.hadoop.mapreduce.Job: Job job_local816132518_0001 running in uber mode : false
+2017-02-19 02:26:35,662 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-19 02:26:35,741 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:26:35,759 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-19 02:26:37,229 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:26:37,229 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:26:37,229 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:26:37,229 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:26:37,230 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:26:37,253 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
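The five MapTask lines just above pin down every buffer number the spill machinery prints later. As a quick cross-check of that arithmetic (a sketch only: the class and the `Configuration` object are illustrative, while the two property names are the real Hadoop keys, one of which the log prints verbatim):

```java
import org.apache.hadoop.conf.Configuration;

public class SortBufferMath {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "mapreduce.task.io.sort.mb: 100" is printed verbatim in the log
        int sortMb = conf.getInt("mapreduce.task.io.sort.mb", 100);
        // bufvoid = 100 MB = 104857600 bytes, the whole circular map-output buffer
        long bufvoid = sortMb * 1024L * 1024L;
        // "soft limit at 83886080" = bufvoid * mapreduce.map.sort.spill.percent
        // (0.80 by default); crossing it triggers a background spill
        float spillPct = conf.getFloat("mapreduce.map.sort.spill.percent", 0.80f);
        long softLimit = (long) (bufvoid * spillPct);
        // "kvstart = 26214396" is an int index into the record-metadata end of
        // the buffer; times 4 bytes per int gives the 104857584 in parentheses
        System.out.printf("bufvoid=%d softLimit=%d kvstartBytes=%d%n",
                bufvoid, softLimit, 26214396L * 4);
    }
}
```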
+2017-02-19 02:26:37,283 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:26:41,675 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:26:42,674 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
+2017-02-19 02:26:44,694 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:26:45,679 INFO org.apache.hadoop.mapreduce.Job: map 7% reduce 0%
+2017-02-19 02:26:47,696 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:26:48,682 INFO org.apache.hadoop.mapreduce.Job: map 13% reduce 0%
+2017-02-19 02:26:50,698 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:26:51,685 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
+2017-02-19 02:26:51,724 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:26:51,742 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 42611793; bufvoid = 104857600
+2017-02-19 02:26:51,744 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 15895828(63583312); length = 10318569/6553600
+2017-02-19 02:26:51,747 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 53097537 kvi 13274380(53097520)
+2017-02-19 02:26:52,846 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:26:52,847 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:26:53,699 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:26:54,688 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-19 02:26:56,700 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:26:59,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:02,706 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:05,707 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:08,708 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:11,710 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:14,711 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:17,712 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:20,713 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:23,714 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:26,715 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:27:38,940 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-19 02:27:38,946 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 53097537 kv 13274380(53097520) kvi 12424484(49697936)
+2017-02-19 02:27:38,946 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:27:38,946 INFO org.apache.hadoop.mapred.MapTask: bufstart = 53097537; bufend = 56605708; bufvoid = 104857600
+2017-02-19 02:27:38,947 INFO org.apache.hadoop.mapred.MapTask: kvstart = 13274380(53097520); kvend = 12424488(49697952); length = 849893/6553600
+2017-02-19 02:27:41,788 INFO org.apache.hadoop.mapred.MapTask: Finished spill 1
+2017-02-19 02:27:42,148 INFO org.apache.hadoop.mapred.Merger: Merging 2 sorted segments
+2017-02-19 02:27:42,273 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 51704192 bytes
+2017-02-19 02:27:44,719 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:44,746 INFO org.apache.hadoop.mapreduce.Job: map 23% reduce 0%
+2017-02-19 02:27:47,720 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:47,749 INFO org.apache.hadoop.mapreduce.Job: map 24% reduce 0%
+2017-02-19 02:27:50,721 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:50,753 INFO org.apache.hadoop.mapreduce.Job: map 25% reduce 0%
+2017-02-19 02:27:53,722 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:53,755 INFO org.apache.hadoop.mapreduce.Job: map 27% reduce 0%
+2017-02-19 02:27:56,723 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:56,758 INFO org.apache.hadoop.mapreduce.Job: map 28% reduce 0%
+2017-02-19 02:27:59,724 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:27:59,760 INFO org.apache.hadoop.mapreduce.Job: map 29% reduce 0%
+2017-02-19 02:28:02,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:28:02,763 INFO org.apache.hadoop.mapreduce.Job: map 31% reduce 0%
+2017-02-19 02:28:05,726 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:28:05,766 INFO org.apache.hadoop.mapreduce.Job: map 32% reduce 0%
+2017-02-19 02:28:08,727 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort >
+2017-02-19 02:28:08,769 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-19 02:28:09,211 INFO org.apache.hadoop.mapred.Task: Task:attempt_local816132518_0001_m_000000_0 is done. And is in the process of committing
+2017-02-19 02:28:09,234 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:09,239 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local816132518_0001_m_000000_0' done.
+2017-02-19 02:28:09,242 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local816132518_0001_m_000000_0
+2017-02-19 02:28:09,245 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local816132518_0001_m_000001_0
+2017-02-19 02:28:09,258 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:28:09,258 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:28:09,268 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-19 02:28:09,770 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:28:09,800 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:28:09,806 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:28:09,807 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:28:09,808 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:28:09,808 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:28:09,817 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:28:10,087 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:28:14,459 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-19 02:28:14,463 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:28:14,466 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:28:14,467 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 13999099; bufvoid = 104857600
+2017-02-19 02:28:14,467 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22598156(90392624); length = 3616241/6553600
+2017-02-19 02:28:14,773 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-19 02:28:15,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:15,775 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-19 02:28:18,271 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:21,273 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:24,273 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:27,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:29,312 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-19 02:28:29,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local816132518_0001_m_000001_0 is done. And is in the process of committing
+2017-02-19 02:28:29,352 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-19 02:28:29,359 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local816132518_0001_m_000001_0' done.
+2017-02-19 02:28:29,361 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local816132518_0001_m_000001_0
+2017-02-19 02:28:29,364 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local816132518_0001_m_000002_0
+2017-02-19 02:28:29,386 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:28:29,388 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:28:29,418 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-19 02:28:29,791 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:28:29,980 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:28:29,988 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:28:29,990 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:28:29,991 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:28:29,991 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:28:30,009 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:28:30,044 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:28:33,034 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-19 02:28:33,044 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:28:33,044 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:28:33,044 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 13719018; bufvoid = 104857600
+2017-02-19 02:28:33,044 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23093504(92374016); length = 3120893/6553600
+2017-02-19 02:28:33,822 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-19 02:28:35,399 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:35,825 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-19 02:28:38,401 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:41,402 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:44,405 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:28:44,759 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-19 02:28:44,778 INFO org.apache.hadoop.mapred.Task: Task:attempt_local816132518_0001_m_000002_0 is done. And is in the process of committing
+2017-02-19 02:28:44,788 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-19 02:28:44,796 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local816132518_0001_m_000002_0' done.
+2017-02-19 02:28:44,799 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local816132518_0001_m_000002_0
+2017-02-19 02:28:44,801 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-19 02:28:44,832 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:28:44,858 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-19 02:28:44,859 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local816132518_0001_r_000000_0
+2017-02-19 02:28:44,955 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:28:44,957 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:28:45,011 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6cc6a82c
+2017-02-19 02:28:45,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-19 02:28:45,273 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local816132518_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-19 02:28:45,694 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local816132518_0001_m_000002_0 decomp: 15279468 len: 15279472 to MEMORY
+2017-02-19 02:28:46,039 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 15279468 bytes from map-output for attempt_local816132518_0001_m_000002_0
+2017-02-19 02:28:46,067 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 15279468, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->15279468
+2017-02-19 02:28:46,271 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local816132518_0001_m_000001_0 decomp: 15807223 len: 15807227 to MEMORY
+2017-02-19 02:28:46,643 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 15807223 bytes from map-output for attempt_local816132518_0001_m_000001_0
+2017-02-19 02:28:46,644 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 15807223, inMemoryMapOutputs.size() -> 2, commitMemory -> 15279468, usedMemory ->31086691
+2017-02-19 02:28:47,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local816132518_0001_m_000000_0 decomp: 51704200 len: 51704204 to MEMORY
+2017-02-19 02:28:50,792 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51704200 bytes from map-output for attempt_local816132518_0001_m_000000_0
+2017-02-19 02:28:50,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51704200, inMemoryMapOutputs.size() -> 3, commitMemory -> 31086691, usedMemory ->82790891
+2017-02-19 02:28:50,812 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-19 02:28:50,814 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-19 02:28:50,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-19 02:28:50,835 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-19 02:28:50,836 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82790873 bytes
+2017-02-19 02:28:50,970 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:28:51,556 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 33%
+2017-02-19 02:28:53,971 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:28:54,559 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 36%
+2017-02-19 02:28:56,973 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:28:57,563 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-19 02:28:59,974 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:00,566 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 44%
+2017-02-19 02:29:02,975 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:03,570 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 48%
+2017-02-19 02:29:05,977 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:06,573 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 52%
+2017-02-19 02:29:08,978 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:09,576 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 56%
+2017-02-19 02:29:11,980 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:12,580 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-19 02:29:14,980 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:15,584 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 64%
+2017-02-19 02:29:17,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 82790891 bytes to disk to satisfy reduce memory limit
+2017-02-19 02:29:17,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 82790891 bytes from disk
+2017-02-19 02:29:17,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-19 02:29:17,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-19 02:29:17,152 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 82790883 bytes
+2017-02-19 02:29:17,153 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > sort
+2017-02-19 02:29:17,206 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-19 02:29:17,382 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local816132518_0001
+java.lang.Exception: java.lang.RuntimeException: java.lang.NoSuchMethodException: Question2.InvIndex$Reduce.<init>()
 at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
 at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
-Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
- at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
- at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
- at org.apache.hadoop.io.compress.CompressionCodec$Util.createOutputStreamWithCodecPool(CompressionCodec.java:131)
- at org.apache.hadoop.io.compress.SnappyCodec.createOutputStream(SnappyCodec.java:99)
- at org.apache.hadoop.mapreduce.lib.output.TextOutputFormat.getRecordWriter(TextOutputFormat.java:136)
- at org.apache.hadoop.mapred.ReduceTask$NewTrackingRecordWriter.<init>(ReduceTask.java:540)
- at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:614)
+Caused by: java.lang.RuntimeException: java.lang.NoSuchMethodException: Question2.InvIndex$Reduce.<init>()
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
+ at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:611)
 at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
 at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
 at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
@@ -4812,32 +201,38 @@ Caused by: java.lang.RuntimeException: native snappy library not available: this
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
 at java.lang.Thread.run(Thread.java:745)
-2017-02-18 08:12:06,253 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 failed with state FAILED due to: NA
-2017-02-18 08:12:06,525 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+Caused by: java.lang.NoSuchMethodException: Question2.InvIndex$Reduce.<init>()
+ at java.lang.Class.getConstructor0(Class.java:2849)
+ at java.lang.Class.getDeclaredConstructor(Class.java:2053)
+ at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:125)
+ ... 8 more
+2017-02-19 02:29:17,589 INFO org.apache.hadoop.mapreduce.Job: Job job_local816132518_0001 failed with state FAILED due to: NA
+2017-02-19 02:29:17,982 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:29:18,019 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
 File System Counters
- FILE: Number of bytes read=63678066
- FILE: Number of bytes written=838216
+ FILE: Number of bytes read=379344960
+ FILE: Number of bytes written=568985052
 FILE: Number of read operations=0
 FILE: Number of large read operations=0
 FILE: Number of write operations=0
 Map-Reduce Framework
 Map input records=507535
- Map output records=4678719
- Map output bytes=43638689
- Map output materialized bytes=1471
+ Map output records=4476402
+ Map output bytes=73838081
+ Map output materialized bytes=82790903
 Input split bytes=351
- Combine input records=4678719
- Combine output records=131
+ Combine input records=0
+ Combine output records=0
 Reduce input groups=0
- Reduce shuffle bytes=1471
+ Reduce shuffle bytes=82790903
 Reduce input records=0
 Reduce output records=0
- Spilled Records=131
- Shuffled Maps =30
+ Spilled Records=7268519
+ Shuffled Maps =3
 Failed Shuffles=0
- Merged Map outputs=30
- GC time elapsed (ms)=951
- Total committed heap usage (bytes)=576008192
+ Merged Map outputs=3
+ GC time elapsed (ms)=2048
+ Total committed heap usage (bytes)=773603328
 Shuffle Errors
 BAD_ID=0
 CONNECTION=0
@@ -4849,3723 +244,224 @@ Caused by: java.lang.RuntimeException: native snappy library not available: this
 Bytes Read=26057874
 File Output Format Counters
 Bytes Written=0
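The failure recorded above is the standard Hadoop reflection trap: ReflectionUtils.newInstance() looks up a no-argument constructor, and a Reducer declared as a non-static inner class has no <init>() at all (every constructor of an inner class implicitly takes the enclosing InvIndex instance), hence the NoSuchMethodException. A minimal sketch of the fix; only the name Question2.InvIndex$Reduce comes from the log, while the key/value types and the reduce body are illustrative:

```java
package Question2;

import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class InvIndex {
    // "static" is the fix: without it the class has no no-arg <init>(),
    // so ReflectionUtils.newInstance() throws the NoSuchMethodException above.
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        @Override
        public void reduce(Text word, Iterable<Text> docs, Context context)
                throws IOException, InterruptedException {
            // Concatenate the posting list for this word (illustrative only).
            StringBuilder postings = new StringBuilder();
            for (Text doc : docs) {
                if (postings.length() > 0) postings.append(", ");
                postings.append(doc.toString());
            }
            context.write(word, new Text(postings.toString()));
        }
    }
}
```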
-2017-02-18 08:51:02,416 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 08:51:02,998 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
-2017-02-18 08:51:03,001 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
-2017-02-18 08:51:04,900 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 08:51:04,911 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 08:51:07,102 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 08:51:07,231 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 08:51:07,733 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 08:51:09,552 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local986354165_0001
-2017-02-18 08:51:11,300 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 08:51:11,301 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local986354165_0001
-2017-02-18 08:51:11,322 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 08:51:11,377 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:51:11,379 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 08:51:11,841 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 08:51:11,843 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000000_0
-2017-02-18 08:51:12,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:51:12,283 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:51:12,304 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 08:51:12,322 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 running in uber mode : false
-2017-02-18 08:51:12,350 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:51:15,352 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:51:15,451 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:51:21,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:51:21,379 INFO org.apache.hadoop.mapreduce.Job: map 4% reduce 0%
-2017-02-18 08:51:24,298 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:51:24,389 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
-2017-02-18 08:51:27,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:51:27,399 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
-2017-02-18 08:51:30,311 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:51:30,406 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
-2017-02-18 08:51:30,786 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:51:30,789 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:51:30,790 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 08:51:33,314 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:33,416 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 08:51:36,318 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:39,322 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:42,328 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:45,334 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:48,337 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:50,854 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480)
-2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 08:51:51,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:54,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:51:57,347 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:00,348 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:03,356 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:06,360 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:09,366 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:09,648 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
- at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
- at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
- at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
- at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
- at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
- at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
- at java.util.concurrent.FutureTask.run(FutureTask.java:262)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
-2017-02-18 08:52:09,699 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000001_0
-2017-02-18 08:52:09,706 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:52:09,707 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:52:09,710 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 08:52:11,414 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:52:11,494 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:52:11,551 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:52:12,367 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:15,633 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:15,716 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:52:16,551 INFO org.apache.hadoop.mapreduce.Job: map 35% reduce 0%
-2017-02-18 08:52:17,568 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 08:52:18,723 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:19,559 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0%
-2017-02-18 08:52:21,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:23,449 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480)
-2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 08:52:23,464 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 08:52:24,726 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:27,732 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:28,910 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@4cb8cd94
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
- at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
- at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
- at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
- at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
- at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
- at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
- at java.util.concurrent.FutureTask.run(FutureTask.java:262)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
-2017-02-18 08:52:28,933 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000002_0
-2017-02-18 08:52:29,198 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:52:29,199 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:52:29,242 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 08:52:29,745 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:52:29,756 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:52:29,784 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:52:35,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:38,522 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 08:52:38,800 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:40,425 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:41,425 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 08:52:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:45,067 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:52:45,078 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 23042072(92168288)
-2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 08:52:46,435 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:52:48,811 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@601a1ecd
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
- at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
- at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
- at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
- at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
- at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
- at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
- at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
- at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
- at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
- at java.util.concurrent.FutureTask.run(FutureTask.java:262)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
-2017-02-18 08:52:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 08:52:49,063 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local986354165_0001 -java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. - at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549) -Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support. - at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65) - at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134) - at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150) - at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165) - at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114) - at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97) - at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606) - at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486) - at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723) - at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793) - at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341) - at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) - at java.util.concurrent.FutureTask.run(FutureTask.java:262) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) - at java.lang.Thread.run(Thread.java:745) -2017-02-18 08:52:49,437 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 08:52:49,458 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 failed with state FAILED due to: NA -2017-02-18 08:52:49,693 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 - File System Counters - FILE: Number of bytes read=70901752 - FILE: Number of bytes written=829530 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=507535 - Map output records=4678719 - Map output bytes=43638689 - Map output materialized bytes=0 - Input split bytes=351 - Combine input records=0 - Combine output records=0 - Spilled Records=0 - Failed Shuffles=0 - Merged Map outputs=0 - GC time elapsed (ms)=11806 - Total committed heap usage (bytes)=1413275648 - File Input Format Counters - Bytes Read=26057874 -2017-02-18 08:56:05,243 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 08:56:05,794 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress -2017-02-18 08:56:05,837 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec -2017-02-18 08:56:08,084 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. 
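Reading the two runs side by side: the 08:52 job (job_local986354165_0001) dies because every spill asks SnappyCodec for a compressor and checkNativeCodeLoaded() throws, since this VM's libhadoop was built without snappy support; the counters confirm that records were mapped (Map output records=4678719) but nothing was ever materialized (Spilled Records=0, Map output materialized bytes=0). The 08:56 rerun then logs deprecation translations for mapred.compress.map.output and mapred.map.output.compression.codec, which suggests the driver was pointed at a pure-Java map-output codec between runs. A minimal sketch of that kind of driver change, assuming a standard Job setup (the class and job names are hypothetical; only the two configuration keys are taken from the log):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class StubDriverWithGzip {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // The deprecated keys that appear in the log; Hadoop rewrites them to
            // mapreduce.map.output.compress / mapreduce.map.output.compress.codec.
            conf.setBoolean("mapred.compress.map.output", true);
            conf.set("mapred.map.output.compression.codec",
                    "org.apache.hadoop.io.compress.GzipCodec");
            Job job = Job.getInstance(conf, "assign1");
            // ... mapper, reducer, input and output paths as in the assignment driver ...
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }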
-2017-02-18 08:56:08,126 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 08:56:11,092 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
-2017-02-18 08:56:11,184 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 08:56:11,584 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 08:56:13,206 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1323619014_0001
-2017-02-18 08:56:15,020 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 08:56:15,022 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1323619014_0001
-2017-02-18 08:56:15,035 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 08:56:15,105 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:56:15,119 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 08:56:15,525 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 08:56:15,526 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000000_0
-2017-02-18 08:56:15,791 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:56:15,934 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:56:15,969 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 08:56:16,025 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 running in uber mode : false
-2017-02-18 08:56:16,028 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 08:56:16,562 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:56:16,568 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:56:16,570 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:56:16,609 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:56:16,664 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:56:21,875 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:56:22,086 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
-2017-02-18 08:56:24,895 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:56:25,136 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
-2017-02-18 08:56:27,896 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:56:28,148 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
-2017-02-18 08:56:30,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:56:31,158 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
-2017-02-18 08:56:31,565 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 08:56:31,574 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:56:31,575 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 08:56:33,907 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:34,166 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 08:56:36,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:39,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:42,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:45,925 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:50,241 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:50,247 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:51,938 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:52,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:52,099 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:52,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:52,933 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:53,960 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:53,965 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:54,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:54,329 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:54,752 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:54,785 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:54,941 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:56:55,192 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:55,203 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:55,743 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:55,745 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:56,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:56,137 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:56,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:56:56,912 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:56:57,187 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:56:57,253 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000000_0 is done. And is in the process of committing
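The Got brand-new compressor [.gz] / Could not obtain compressor from CodecPool pairs that now accompany every spill look alarming but are survivable, unlike the Snappy case: without native zlib, GzipCodec cannot hand CodecPool a reusable Compressor object, so the pool logs its INFO line, returns nothing usable, and IFile.Writer warns and writes the spill anyway instead of throwing. If the intent were to keep Snappy wherever the native library does exist and fall back otherwise, a defensive codec choice along these lines would avoid both failure modes (a sketch, not code from this repo; it reuses the same check that threw at SnappyCodec.java:65 above):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.io.compress.SnappyCodec;

    public class CodecChooser {
        /** Enable map-output compression, preferring Snappy only when natively available. */
        public static void configure(Configuration conf) {
            boolean snappyAvailable;
            try {
                SnappyCodec.checkNativeCodeLoaded(); // throws the RuntimeException seen in the log
                snappyAvailable = true;
            } catch (RuntimeException e) {
                snappyAvailable = false;
            }
            conf.setBoolean("mapreduce.map.output.compress", true);
            conf.setClass("mapreduce.map.output.compress.codec",
                    snappyAvailable ? SnappyCodec.class : GzipCodec.class,
                    CompressionCodec.class);
        }
    }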
-2017-02-18 08:56:57,256 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:56:57,261 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000000_0' done.
-2017-02-18 08:56:57,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000000_0
-2017-02-18 08:56:57,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000001_0
-2017-02-18 08:56:57,270 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:56:57,271 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:56:57,273 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 08:56:57,664 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:56:57,669 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:56:57,671 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:56:57,678 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:56:57,702 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:56:58,254 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:57:01,849 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 08:57:01,863 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 08:57:02,264 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 08:57:03,300 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:57:04,281 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 08:57:06,304 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:57:07,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:07,567 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:07,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:07,728 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,017 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,021 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,230 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,232 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,382 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,388 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,528 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,548 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,700 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:08,852 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:08,893 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:09,003 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:09,020 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:09,263 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:09,270 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:09,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:57:09,394 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:57:09,411 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 08:57:09,416 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:57:09,419 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000001_0' done.
-2017-02-18 08:57:09,420 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000001_0
-2017-02-18 08:57:09,421 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000002_0
-2017-02-18 08:57:09,430 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:09,431 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:09,432 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 08:57:09,807 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 08:57:09,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 08:57:09,836 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 08:57:09,850 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 08:57:10,305 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:57:12,866 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 08:57:13,315 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 08:57:15,444 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:57:16,322 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 08:57:17,038 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,042 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,180 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,341 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,558 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,563 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,684 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,847 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:17,931 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:17,969 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:18,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:18,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:18,225 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:18,239 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 08:57:18,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 08:57:18,472 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 08:57:18,520 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 08:57:18,583 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 08:57:18,590 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 08:57:18,594 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000002_0' done.
-2017-02-18 08:57:18,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000002_0
-2017-02-18 08:57:18,597 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
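All three map attempts print identical buffer geometry, and the numbers are internally consistent with MapOutputBuffer's layout: mapreduce.task.io.sort.mb of 100 gives a 104857600-byte circular buffer, the default 0.80 spill threshold gives the logged soft limit, record metadata occupies 16 bytes (four ints) per record growing down from the top of the buffer, and 6553600 is the resulting metadata capacity reported as "length". A quick arithmetic check (a worked example, not project code):

    public class SortBufferMath {
        public static void main(String[] args) {
            int sortMb = 100;                        // mapreduce.task.io.sort.mb: 100
            int bufvoid = sortMb * 1024 * 1024;      // 104857600, as logged
            int softLimit = (int) (bufvoid * 0.80f); // 83886080, "soft limit at ..."
            int kvstart = bufvoid / 4 - 4;           // 26214396 ints = byte offset 104857584,
                                                     // one 16-byte metadata slot below the top
            int kvCapacity = bufvoid / 16;           // 6553600, the "length = .../6553600" ceiling
            System.out.printf("bufvoid=%d soft=%d kvstart=%d capacity=%d%n",
                    bufvoid, softLimit, kvstart, kvCapacity);
        }
    }

Read against this, the pg100.txt spill line (bufend = 9171648; length = 4093773/6553600) says roughly 9.2 MB of raw keys and values plus about a million records' worth of metadata were buffered, comfortably under the soft limit, which is why each of these tasks flushes exactly once ("Finished spill 0").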
-2017-02-18 08:57:18,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 08:57:18,692 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000000_0
-2017-02-18 08:57:18,737 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:18,738 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:18,760 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a6d125b
-2017-02-18 08:57:18,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:18,919 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:57:19,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000001_0
-2017-02-18 08:57:19,296 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3115d34f
-2017-02-18 08:57:19,298 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,299 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,301 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,308 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:19,315 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,317 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,339 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,340 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 08:57:19,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,358 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,349 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:57:19,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,375 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000002_0
-2017-02-18 08:57:19,384 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f3ed32a
-2017-02-18 08:57:19,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,393 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,402 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:19,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,422 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,424 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,434 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:57:19,424 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,443 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,443 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,444 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,447 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,458 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,459 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,445 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000003_0
-2017-02-18 08:57:19,482 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,483 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,483 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,485 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,487 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,495 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,494 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4edb440a
-2017-02-18 08:57:19,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
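Each reduce task starts its own Shuffle plugin and a MergeManagerImpl with identical ceilings, and those ceilings are fixed fractions of a single number: memoryLimit=679778688 is mapreduce.reduce.shuffle.input.buffer.percent (default 0.70) of the task JVM's max heap, maxSingleShuffleLimit is 0.25 of memoryLimit, and mergeThreshold is mapreduce.reduce.shuffle.merge.percent (default 0.66) of it. Checking the logged values (a worked example; the fractions are the stock defaults, not settings read from this repo):

    public class ShuffleMath {
        public static void main(String[] args) {
            long memoryLimit = 679778688L;                    // logged by MergeManagerImpl
            System.out.println((long) (memoryLimit * 0.25f)); // 169944672 = maxSingleShuffleLimit
            System.out.println((long) (memoryLimit * 0.66f)); // 448653952 = mergeThreshold (float math)
            System.out.println((long) (memoryLimit / 0.70f)); // ~971 MB: the implied reducer max heap
        }
    }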
-2017-02-18 08:57:19,500 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,488 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,516 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,518 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,526 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,528 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,529 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,520 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,547 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,547 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,548 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,550 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,550 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,551 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,551 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:19,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,554 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,555 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,567 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,568 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,570 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,576 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,603 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,605 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,599 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,618 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,618 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,619 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,620 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,621 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,622 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,622 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:57:19,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,624 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000004_0
-2017-02-18 08:57:19,652 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,665 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,666 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,653 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@448d87fc
-2017-02-18 08:57:19,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,685 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,687 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,691 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:19,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,694 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,694 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,685 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,697 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,698 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,698 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,700 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,714 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,715 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,714 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,713 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,735 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,737 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,709 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,738 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,729 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 08:57:19,739 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,754 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,756 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,750 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,781 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,785 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,783 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,790 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,791 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,792 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,792 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,782 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,793 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,786 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000005_0
-2017-02-18 08:57:19,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,804 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,826 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,821 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,833 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,835 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,818 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,837 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,837 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,838 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,838 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,839 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,839 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,817 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,833 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 08:57:19,845 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 08:57:19,846 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68f559e5
-2017-02-18 08:57:19,847 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,847 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,863 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,865 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 08:57:19,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,881 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,881 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,852 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,849 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,889 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,891 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,893 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,893 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,895 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,895 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,896 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:19,897 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
-2017-02-18 08:57:19,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,896 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,918 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,918 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,919 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,919 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,921 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,921 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,922 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,922 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,911 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,951 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,953 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,953 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,954 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,955 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,956 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,957 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,958 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,959 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,959 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,961 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,961 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,962 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:19,962 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
-2017-02-18 08:57:19,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:20,011 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,011 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
-2017-02-18 08:57:19,965 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,012 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,013 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,014 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,015 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,016 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,017 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,017 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,019 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,019 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,020 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,020 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:20,021 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,021 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
-2017-02-18 08:57:19,964 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,023 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
-2017-02-18 08:57:19,912 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,025 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:20,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,026 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:20,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:20,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,029 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:20,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,030 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
-2017-02-18 08:57:20,031 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 08:57:20,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle
output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:19,933 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 08:57:19,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,063 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,064 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,065 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,067 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,067 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,068 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,068 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,069 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,069 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,070 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,072 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,113 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,115 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,075 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,116 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map 
attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000006_0 -2017-02-18 08:57:20,074 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,073 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,073 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,140 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,142 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,142 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,140 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,144 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,145 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,147 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,147 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,148 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,148 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,149 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,149 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,139 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,151 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,152 
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,166 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,167 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,138 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,169 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@35fbcbad -2017-02-18 08:57:20,126 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,172 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,173 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,176 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 08:57:20,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,190 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,191 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,190 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,204 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,205 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
-2017-02-18 08:57:20,206 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,207 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,212 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,213 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,215 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,215 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,188 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,223 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 08:57:20,234 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,238 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,237 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,243 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,245 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,245 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,246 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,248 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,250 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,250 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,252 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,236 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,327 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,328 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of 
map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,330 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,331 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,335 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,301 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,300 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,338 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,339 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000007_0 -2017-02-18 08:57:20,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,343 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output 
of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4e2d66d5 -2017-02-18 08:57:20,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,384 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,385 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,387 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,388 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,391 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,389 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,411 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,411 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,412 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,414 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,401 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 08:57:20,400 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,417 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,418 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,399 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,398 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
-2017-02-18 08:57:20,468 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,470 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,462 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,487 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,489 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,490 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,490 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,494 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,494 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,495 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,495 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,497 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,497 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,498 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,499 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,500 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,501 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,501 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,460 
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,503 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,459 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,504 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,506 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,507 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,507 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,512 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,458 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,515 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,515 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,517 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,517 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,518 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map 
attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,477 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,554 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,556 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,556 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,557 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,560 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,563 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000008_0 -2017-02-18 08:57:20,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,567 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,542 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,581 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,534 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle 
output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 08:57:20,584 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 08:57:20,585 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@239de86 -2017-02-18 08:57:20,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,586 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,609 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,611 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,611 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,612 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,587 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 08:57:20,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,606 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 
08:57:20,633 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,641 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,642 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,644 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,668 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,654 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 08:57:20,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,697 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,701 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,649 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,704 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,705 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,705 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,707 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,707 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,708 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,711 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,712 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 08:57:20,714 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000009_0 -2017-02-18 08:57:20,718 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 08:57:20,724 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 08:57:20,725 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@236e4a57 -2017-02-18 08:57:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 08:57:20,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,746 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,746 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,748 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,779 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
-2017-02-18 08:57:20,776 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,813 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,815 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,766 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,817 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,765 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,764 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 08:57:20,762 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,786 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 22 len: 26 to MEMORY -2017-02-18 08:57:20,824 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,830 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,831 
INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 08:57:20,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,834 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,835 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,837 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 08:57:20,866 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 08:57:20,847 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 08:57:20,843 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 08:57:20,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 08:57:20,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 08:57:20,879 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 08:57:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map 
attempt_local1323619014_0001_m_000000_0 decomp: 22 len: 26 to MEMORY
-2017-02-18 08:57:20,893 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1323619014_0001
-java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
-Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
-	at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134)
-	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376)
-	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
-	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-	at java.lang.Thread.run(Thread.java:745)
-Caused by: java.io.IOException: not a gzip file
-	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496)
-	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257)
-	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186)
-	at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91)
-	at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85)
-	at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199)
-	at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97)
-	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157)
-	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102)
-	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85)
-2017-02-18 08:57:21,357 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 failed with state FAILED due to: NA
-2017-02-18 08:57:21,653 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
-	File System Counters
-		FILE: Number of bytes read=63678066
-		FILE: Number of bytes written=838861
-		FILE: Number of read operations=0
-		FILE: Number of large read operations=0
-		FILE: Number of write operations=0
-	Map-Reduce Framework
-		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=1471
-		Input split bytes=351
-		Combine input records=4678719
-		Combine output records=131
-		Spilled Records=131
-		Failed Shuffles=0
-		Merged Map outputs=0
-		GC time elapsed (ms)=740
-		Total committed heap usage (bytes)=576008192
-	File Input Format Counters
-		Bytes Read=26057874
-2017-02-18 09:08:49,683 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 09:08:50,130 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
-2017-02-18 09:08:50,137 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated.
Instead, use mapreduce.map.output.compress.codec -2017-02-18 09:08:51,720 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 09:08:51,730 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 09:08:53,605 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). -2017-02-18 09:08:53,644 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 -2017-02-18 09:08:54,021 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 -2017-02-18 09:08:55,329 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local996311227_0001 -2017-02-18 09:08:57,090 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ -2017-02-18 09:08:57,092 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local996311227_0001 -2017-02-18 09:08:57,103 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null -2017-02-18 09:08:57,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:08:57,159 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter -2017-02-18 09:08:57,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks -2017-02-18 09:08:57,576 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000000_0 -2017-02-18 09:08:57,771 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:08:57,879 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:08:57,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 -2017-02-18 09:08:58,340 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 running in uber mode : false -2017-02-18 09:08:58,347 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% -2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 09:08:58,597 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 09:08:58,610 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 09:08:58,611 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 09:08:58,647 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 09:08:58,679 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 09:09:03,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 09:09:04,390 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0% -2017-02-18 09:09:06,885 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 09:09:07,419 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0% -2017-02-18 09:09:09,890 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 09:09:10,424 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0% -2017-02-18 09:09:12,894 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 
09:09:13,244 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map -2017-02-18 09:09:13,250 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 09:09:13,251 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 -2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 -2017-02-18 09:09:13,444 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% -2017-02-18 09:09:15,897 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:18,901 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:21,904 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:24,905 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:27,909 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:30,913 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:31,712 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:31,734 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:33,465 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:33,473 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:33,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:34,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:34,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:35,151 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:35,178 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:35,531 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:35,550 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:35,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:36,009 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:36,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:36,392 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:36,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:36,927 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:36,939 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:37,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:37,323 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:38,170 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:38,184 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:38,475 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 09:09:38,501 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000000_0 is done. 
And is in the process of committing -2017-02-18 09:09:38,509 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 09:09:38,519 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000000_0' done. -2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000000_0 -2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000001_0 -2017-02-18 09:09:38,528 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:09:38,529 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:09:38,530 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 -2017-02-18 09:09:38,533 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 09:09:38,948 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 09:09:38,955 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 09:09:38,957 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 09:09:38,963 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 09:09:38,973 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 09:09:43,485 INFO org.apache.hadoop.mapred.LocalJobRunner: -2017-02-18 09:09:43,506 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 -2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 -2017-02-18 09:09:43,562 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% -2017-02-18 09:09:44,574 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:45,580 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% -2017-02-18 09:09:47,578 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:49,441 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:49,455 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:49,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:49,639 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:49,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:49,941 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:50,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:50,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:50,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:50,368 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool 
-2017-02-18 09:09:50,504 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:50,519 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:50,676 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:50,676 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:50,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:50,936 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:51,060 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:51,065 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:51,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:51,334 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:51,476 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 09:09:51,490 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000001_0 is done. And is in the process of committing -2017-02-18 09:09:51,503 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 09:09:51,515 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000001_0' done. -2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000001_0 -2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000002_0 -2017-02-18 09:09:51,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:09:51,530 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:09:51,543 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 -2017-02-18 09:09:51,816 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 09:09:51,966 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) -2017-02-18 09:09:51,975 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 -2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 -2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 -2017-02-18 09:09:51,977 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 -2017-02-18 09:09:51,982 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer -2017-02-18 09:09:51,984 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it -2017-02-18 09:09:54,908 INFO org.apache.hadoop.mapred.LocalJobRunner: -2017-02-18 09:09:54,916 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output -2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: Spilling map output -2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 -2017-02-18 09:09:54,918 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 -2017-02-18 09:09:55,827 INFO 
org.apache.hadoop.mapreduce.Job: map 67% reduce 0% -2017-02-18 09:09:57,551 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:09:57,840 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% -2017-02-18 09:09:59,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,121 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,253 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,416 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,606 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,714 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,730 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,861 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,865 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:09:59,988 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:09:59,989 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:10:00,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:10:00,146 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:10:00,228 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:10:00,241 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:10:00,468 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] -2017-02-18 09:10:00,470 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool -2017-02-18 09:10:00,518 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 -2017-02-18 09:10:00,555 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort -2017-02-18 09:10:00,584 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000002_0 is done. And is in the process of committing -2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.LocalJobRunner: map -2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000002_0' done. -2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000002_0 -2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
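
The failed job above (job_local1323619014_0001) died in the shuffle with java.io.IOException: not a gzip file while its LocalFetchers were reading supposedly gzip-compressed map output, and the 09:08 re-run logs deprecation warnings for mapred.compress.map.output and mapred.map.output.compression.codec before acquiring a [.gz] compressor on every spill. Both point at intermediate map-output compression being switched on in the driver. The sketch below is a minimal illustration only, assuming a word-count-style job with gzip map-output compression; CompressedMapOutputDriver, TokenizingMapper, and SummingReducer are illustrative names, not this assignment's actual classes.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class CompressedMapOutputDriver {

    // Illustrative mapper: emits one (token, 1) pair per whitespace-separated token.
    public static class TokenizingMapper
            extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable offset, Text line, Context ctx)
                throws IOException, InterruptedException {
            for (String tok : line.toString().split("\\s+")) {
                if (!tok.isEmpty()) {
                    word.set(tok);
                    ctx.write(word, ONE);
                }
            }
        }
    }

    // Illustrative combiner/reducer: sums the counts per token.
    public static class SummingReducer
            extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text word, Iterable<IntWritable> counts, Context ctx)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable c : counts) {
                sum += c.get();
            }
            ctx.write(word, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Gzip-compress the intermediate map output. These are the new-style
        // property names that the deprecation warnings in this log point to;
        // the map-side spill writer and the reduce-side shuffle both read them.
        conf.setBoolean("mapreduce.map.output.compress", true);
        conf.setClass("mapreduce.map.output.compress.codec",
                GzipCodec.class, CompressionCodec.class);

        // Job.getInstance copies the Configuration, so set compression first.
        Job job = Job.getInstance(conf, "word count, compressed map output");
        // Setting the jar addresses the "No job jar file set. User classes may
        // not be found." warning seen in this log when running from a jar.
        job.setJarByClass(CompressedMapOutputDriver.class);
        job.setMapperClass(TokenizingMapper.class);
        job.setCombinerClass(SummingReducer.class);
        job.setReducerClass(SummingReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Note that the compression properties go into the Configuration before Job.getInstance, since the Job takes a copy of it; and if the reduce side then receives bytes that are not gzip-framed, BuiltInGzipDecompressor.processBasicHeader rejects the stream exactly as in the stack trace above.
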
-2017-02-18 09:10:00,680 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 09:10:00,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000000_0 -2017-02-18 09:10:00,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:10:00,728 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:10:00,749 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10c8fecc -2017-02-18 09:10:00,843 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 09:10:00,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:10:00,891 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:01,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,253 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000001_0 -2017-02-18 09:10:01,258 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:10:01,259 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:10:01,260 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@553beecf -2017-02-18 09:10:01,262 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,262 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,264 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:10:01,274 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,284 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,286 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,296 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,304 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,306 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,290 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:01,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000002_0 -2017-02-18 09:10:01,336 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@605afad3 -2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:10:01,344 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,349 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,355 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,366 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,368 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,378 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,376 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:01,404 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,405 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,406 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,408 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000003_0 -2017-02-18 09:10:01,421 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:10:01,423 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,424 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,424 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,426 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,426 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,427 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,428 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,430 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 
09:10:01,431 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,433 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,434 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,434 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,437 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,438 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,440 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,446 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:10:01,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@673f2af1 -2017-02-18 09:10:01,455 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,456 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:10:01,464 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,466 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,474 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,509 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,508 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,481 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:01,513 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,516 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,517 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000004_0 -2017-02-18 09:10:01,521 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,543 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,544 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,536 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: 
org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a668605 -2017-02-18 09:10:01,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,549 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,564 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,564 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,568 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,582 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,584 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,584 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,585 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,585 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,586 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,586 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,566 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,588 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,591 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,594 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,617 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,595 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,629 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,629 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,630 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,631 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:01,637 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,638 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 
len: 15 to MEMORY -2017-02-18 09:10:01,640 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,653 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,663 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,679 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:01,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,642 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:01,683 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,683 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:01,690 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,696 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000005_0 -2017-02-18 09:10:01,696 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:01,710 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:01,710 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,729 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,729 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,693 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,731 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,733 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,734 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,734 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,735 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,737 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,738 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,739 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,739 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:10:01,744 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,745 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,791 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,797 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,795 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,798 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,799 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,794 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,799 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,801 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,793 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:10:01,802 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6349766d
-2017-02-18 09:10:01,792 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,803 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,804 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,805 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,806 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:10:01,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,808 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,807 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,844 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,844 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,846 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,852 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,868 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,869 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,870 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,867 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:10:01,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,882 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,882 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,883 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,885 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,885 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,878 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,870 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,907 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,909 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,909 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,910 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,911 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000006_0
-2017-02-18 09:10:01,912 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,913 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,925 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,926 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,927 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,928 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,929 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,930 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,930 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:01,936 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,936 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,938 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,940 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,940 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,942 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,943 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,943 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,944 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,945 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,946 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,949 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,949 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:10:01,957 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4260af0b
-2017-02-18 09:10:01,958 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:10:01,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,983 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:01,984 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,976 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,975 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:01,974 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,992 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,995 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:01,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,997 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:01,993 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,000 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,002 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:01,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,026 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,027 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,016 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,015 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,029 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,030 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,015 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,011 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:10:02,027 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,042 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,047 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,074 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,057 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,056 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,052 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,050 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,049 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,049 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000007_0
-2017-02-18 09:10:02,077 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,095 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,096 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,102 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,108 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,094 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,115 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,118 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,119 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,120 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,092 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,123 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,125 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,126 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,126 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,127 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,114 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,173 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,174 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,099 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,175 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,098 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,177 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,177 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,178 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,180 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,156 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:10:02,181 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:10:02,182 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@74eef9db
-2017-02-18 09:10:02,183 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,184 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,192 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:10:02,193 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,194 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,204 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,205 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,205 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,206 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,210 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,188 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,186 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,185 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,213 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,199 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,226 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:10:02,256 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,258 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,267 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,264 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,268 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,269 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,263 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,269 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,262 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,271 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,273 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,273 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,274 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,274 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,260 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,275 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,259 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,275 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,276 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000008_0
-2017-02-18 09:10:02,282 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,283 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:10:02,304 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:10:02,305 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@eb9012c
-2017-02-18 09:10:02,307 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,316 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,318 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,318 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,337 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,316 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,362 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,364 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,364 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,365 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,315 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,368 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,313 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,372 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,373 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,374 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,374 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,375 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,376 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,377 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,312 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,379 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,380 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,382 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,309 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:10:02,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,387 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,389 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,389 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,391 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,392 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,394 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,407 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,410 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,403 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,434 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,434 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,438 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,442 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,443 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,444 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,430 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,425 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,414 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:10:02,451 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,452 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,453 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,453 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,454 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,454 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,456 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,456 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,459 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,459 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,460 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,460 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,537 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,537 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,541 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,541 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,541 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,542 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,542 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,544 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,568 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,570 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,590 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,554 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,594 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,553 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,595 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,596 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,596 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,597 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,598 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,599 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,599 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,600 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,601 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,553 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,601 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,603 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,603 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,604 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,606 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,606 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,552 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,549 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,548 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000009_0
-2017-02-18 09:10:02,548 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,609 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,545 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,594 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@45b028db
-2017-02-18 09:10:02,657 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:10:02,662 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,668 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,669 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,667 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,671 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,673 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,674 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,675 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,675 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,677 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,678 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,665 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,664 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,663 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,691 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,692 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,716 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,724 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,725 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,723 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,726 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,728 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,722 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,728 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,721 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,733 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,751 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,753 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,753 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,755 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,755 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,756 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,756 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,758 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,758 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,759 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,759 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,718 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,761 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,717 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,763 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,765 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,765 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,766 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,766 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,768 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,768 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,769 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,769 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
-2017-02-18 09:10:02,771 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,744 INFO
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY -2017-02-18 09:10:02,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY -2017-02-18 09:10:02,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY -2017-02-18 09:10:02,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:02,726 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:02,833 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY -2017-02-18 09:10:02,841 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:10:02,842 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY -2017-02-18 09:10:02,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY -2017-02-18 09:10:02,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY -2017-02-18 09:10:02,844 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:02,845 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY -2017-02-18 09:10:02,848 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:10:02,856 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY -2017-02-18 09:10:02,857 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. 
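The MergerManager lines in these reduce attempts all report the same triple: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952. Assuming the stock Hadoop 2.x shuffle defaults (mapreduce.reduce.shuffle.input.buffer.percent = 0.70 of the reducer heap, a 0.25 single-segment fraction, and mapreduce.reduce.shuffle.merge.percent = 0.66), the last two values follow from the first; a minimal sketch of the arithmetic:

// Sketch only: reproduces the MergerManager numbers under assumed
// Hadoop 2.x defaults; the class name is illustrative, not repo code.
public class ShuffleMemoryMath {
    public static void main(String[] args) {
        long memoryLimit = 679778688L;                              // ~0.70f * reducer JVM heap, as logged
        long maxSingleShuffleLimit = (long) (memoryLimit * 0.25f);  // cap on one in-memory segment
        long mergeThreshold = (long) (memoryLimit * 0.66f);         // in-memory merge trigger
        System.out.println(maxSingleShuffleLimit);                  // 169944672, matching the log
        System.out.println(mergeThreshold);                         // 448653952, matching the log
    }
}

Every local reduce attempt prints the identical triple because each one sizes its in-memory shuffle from the same JVM heap.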
-2017-02-18 09:10:02,849 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,871 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
-2017-02-18 09:10:02,855 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
-2017-02-18 09:10:02,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
-2017-02-18 09:10:02,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
-2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
-2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
-2017-02-18 09:10:02,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
-2017-02-18 09:10:02,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
-2017-02-18 09:10:02,876 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local996311227_0001
-java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
- at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
- at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
-Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
- at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134)
- at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376)
- at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
- at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
- at java.util.concurrent.FutureTask.run(FutureTask.java:262)
- at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
- at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
- at java.lang.Thread.run(Thread.java:745)
-Caused by: java.io.IOException: not a gzip file
- at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496)
- at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257)
- at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186)
- at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91)
- at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85)
- at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199)
- at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97)
- at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157)
- at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102)
- at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85)
-2017-02-18 09:10:03,867 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 failed with state FAILED due to: NA
-2017-02-18 09:10:04,136 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
- File System Counters
- FILE: Number of bytes read=63678066
- FILE: Number of bytes written=834451
- FILE: Number of read operations=0
- FILE: Number of large read operations=0
- FILE: Number of write operations=0
- Map-Reduce Framework
- Map input records=507535
- Map output records=4678719
- Map output bytes=43638689
- Map output materialized bytes=1471
- Input split bytes=351
- Combine input records=4678719
- Combine output records=131
- Spilled Records=131
- Failed Shuffles=0
- Merged Map outputs=0
- GC time elapsed (ms)=847
- Total committed heap usage (bytes)=576008192
- File Input Format Counters
- Bytes Read=26057874
-2017-02-18 09:26:49,408 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 09:26:49,749 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
-2017-02-18 09:26:49,775 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
-2017-02-18 09:26:51,050 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 09:26:51,065 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 09:26:52,559 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
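The failing fetch above dies decompressing in-memory map output ("not a gzip file"), and the re-run that starts at 09:26:49 immediately logs deprecation warnings for the two map-output-compression keys, so the driver evidently enables gzip map-output compression under the old property names. A minimal driver sketch with the current names (class name and job wiring are illustrative, not taken from this repo):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;

public class CompressedMapOutputDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean("mapreduce.map.output.compress", true);   // replaces mapred.compress.map.output
        conf.setClass("mapreduce.map.output.compress.codec",      // replaces mapred.map.output.compression.codec
                GzipCodec.class, CompressionCodec.class);
        Job job = Job.getInstance(conf, "assign1");
        job.setJarByClass(CompressedMapOutputDriver.class);       // also avoids the "No job jar file set" warning
        // ... mapper/combiner/reducer and input/output paths as in the assignment ...
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With the combiner already collapsing 4,678,719 map output records to 131 (see the counters above), the materialized map output is tiny either way, so compressing it buys little here.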
-2017-02-18 09:26:52,648 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 09:26:52,917 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 09:26:53,914 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local751599384_0001
-2017-02-18 09:26:55,334 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 09:26:55,335 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local751599384_0001
-2017-02-18 09:26:55,353 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 09:26:55,413 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:26:55,421 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 09:26:55,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 09:26:55,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000000_0
-2017-02-18 09:26:56,054 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:26:56,169 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:26:56,178 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 09:26:56,622 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 running in uber mode : false
-2017-02-18 09:26:56,624 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:26:56,956 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:26:57,006 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:27:02,132 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:27:02,662 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
-2017-02-18 09:27:05,153 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:27:05,670 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
-2017-02-18 09:27:08,158 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:27:08,237 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:27:08,244 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 09:27:08,246 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 09:27:08,689 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 09:27:11,159 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:14,169 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:17,178 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:20,180 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:22,469 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:22,475 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:23,187 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:24,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:24,134 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:24,753 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:24,755 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:25,552 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:25,553 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:25,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:25,892 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:26,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:26,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:26,269 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:26,579 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:26,583 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:26,992 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:27,005 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:27,304 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:27,308 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:27,911 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:27,916 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:28,128 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:27:28,155 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 09:27:28,161 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:27:28,164 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000000_0' done.
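Each "Got brand-new compressor [.gz]" / "Could not obtain compressor from CodecPool" pair above is one spill segment asking CodecPool for a gzip Compressor. With the native-hadoop library unavailable (the NativeCodeLoader warning at startup), GzipCodec appears to have no reusable Compressor to lend, so IFile logs the warning and writes through the codec's built-in java.util.zip stream instead. A sketch of the pool protocol (the helper is hypothetical, not repo code):

import java.io.OutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecPoolSketch {
    // Hypothetical helper mirroring how spill writers use the pool.
    static void writeCompressed(OutputStream raw, byte[] payload, Configuration conf) throws Exception {
        GzipCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
        Compressor compressor = CodecPool.getCompressor(codec);   // may be null if the codec has none to lend
        try {
            OutputStream out = (compressor != null)
                    ? codec.createOutputStream(raw, compressor)
                    : codec.createOutputStream(raw);              // built-in java.util.zip fallback
            out.write(payload);
            out.close();
        } finally {
            if (compressor != null) {
                CodecPool.returnCompressor(compressor);           // recycle across spills
            }
        }
    }
}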
-2017-02-18 09:27:28,166 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000000_0
-2017-02-18 09:27:28,167 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000001_0
-2017-02-18 09:27:28,174 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:27:28,175 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:27:28,177 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 09:27:28,463 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:27:28,474 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:27:28,476 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:27:28,482 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:27:28,494 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:27:28,789 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:27:31,526 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:27:31,534 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 09:27:31,536 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 09:27:31,805 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 09:27:34,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:34,817 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 09:27:35,818 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:35,836 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:35,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:35,952 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,176 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,330 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,439 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,444 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,573 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,693 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,829 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,837 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:36,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:36,932 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:37,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:37,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:37,198 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:27:37,205 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:37,217 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 09:27:37,222 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:27:37,224 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000001_0' done.
-2017-02-18 09:27:37,225 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000001_0
-2017-02-18 09:27:37,226 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000002_0
-2017-02-18 09:27:37,233 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:27:37,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:27:37,250 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 09:27:37,543 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:27:37,547 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:27:37,554 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:27:37,556 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:27:37,828 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:27:39,830 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:27:39,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:27:39,853 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 09:27:40,851 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 09:27:42,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:42,902 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,018 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,164 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,164 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:27:43,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,372 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,467 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,482 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,607 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,635 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,746 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,863 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 09:27:43,872 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:43,896 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:43,981 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:44,003 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:44,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
-2017-02-18 09:27:44,209 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
-2017-02-18 09:27:44,256 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:27:44,279 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 09:27:44,285 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:27:44,288 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000002_0' done.
-2017-02-18 09:27:44,289 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000002_0
-2017-02-18 09:27:44,290 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
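The recurring MapTask buffer block above (mapreduce.task.io.sort.mb: 100, soft limit at 83886080, bufstart = 0; bufvoid = 104857600) is the sort buffer sized at 100 MB with the default spill threshold; a sketch of the arithmetic, assuming the stock mapreduce.map.sort.spill.percent of 0.80:

// Sketch only: derives the MapTask buffer numbers from io.sort.mb;
// the class name is illustrative, not repo code.
public class SortBufferMath {
    public static void main(String[] args) {
        int ioSortMb = 100;                          // mapreduce.task.io.sort.mb, as logged
        long bufvoid = ioSortMb * 1024L * 1024L;     // 104857600, the logged bufvoid
        long softLimit = (long) (bufvoid * 0.80f);   // 83886080, the logged soft limit
        System.out.println(bufvoid + " / " + softLimit);
    }
}

Each of the three map tasks prints an identical block because every task allocates its own MapOutputBuffer of the same size.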
-2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000000_0 -2017-02-18 09:27:44,412 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:44,414 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:44,442 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53133050 -2017-02-18 09:27:44,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:44,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:27:44,731 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,763 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,802 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000001_0 -2017-02-18 09:27:44,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ee007c9 -2017-02-18 09:27:44,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:44,814 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,816 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,817 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,827 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000001_0 Thread started: EventFetcher for 
fetching Map Completion Events -2017-02-18 09:27:44,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,860 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,866 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,872 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000002_0 -2017-02-18 09:27:44,874 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,877 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,887 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,888 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,890 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,891 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2912ee3c -2017-02-18 09:27:44,896 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,897 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, 
memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:44,904 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,905 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,911 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,919 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,929 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:27:44,924 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,939 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,940 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,940 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,941 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,944 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,933 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:44,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000003_0 -2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@584ca76 -2017-02-18 09:27:44,971 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new 
decompressor [.gz] -2017-02-18 09:27:44,972 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:44,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,977 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,995 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,996 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,996 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,998 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:44,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:45,000 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:45,001 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,002 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:44,977 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:44,977 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,004 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,005 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,005 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,007 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,008 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,009 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,009 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,010 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,011 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,012 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,056 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,055 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,033 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:27:45,013 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:45,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,058 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:45,059 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:45,060 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000004_0 -2017-02-18 09:27:45,061 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,070 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:45,068 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7490b4d0 -2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:45,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:45,077 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,081 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:45,078 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,086 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,089 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:45,090 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:45,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,101 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:45,091 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:45,108 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,094 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,093 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:45,109 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:45,110 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:45,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 
09:27:45,111 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
[several hundred further deleted hadoop.log INFO lines, 2017-02-18 09:27:45,112 through 09:27:46,120, elided: out-of-order interleavings of the same few messages from the concurrently starting local reduce tasks, namely
 LocalFetcher "localfetcher#1-#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0" (decomp/len pairs 61/65, 154/158, 112/116, 63/67, 52/56, 54/58, 72/76, 88/92);
 CodecPool "Got brand-new decompressor [.gz]";
 LocalJobRunner "Starting task: attempt_local751599384_0001_r_000005_0" through "attempt_local751599384_0001_r_000008_0";
 EventFetcher "Thread started: EventFetcher for fetching Map Completion Events" for attempts r_000004_0 through r_000007_0;
 FileOutputCommitter "File Output Committer Algorithm version is 1";
 Task "Using ResourceCalculatorProcessTree : [ ]";
 ReduceTask "Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e35379c / @43e0ff38 / @77e7c326 / @7659657a";
 MergeManagerImpl "MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10"]
-2017-02-18 09:27:46,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map
attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,121 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,121 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,102 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,124 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,124 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,125 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,127 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,099 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,080 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:27:46,107 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,175 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,181 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,181 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,184 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,184 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,186 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,186 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,187 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,189 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,194 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,194 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,195 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,196 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,197 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,199 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,200 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,202 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,176 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,254 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,256 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,256 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,257 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,258 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,258 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,260 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,260 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,261 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,175 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 09:27:46,235 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,223 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,264 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000009_0 -2017-02-18 09:27:46,267 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,286 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18084038 -2017-02-18 09:27:46,291 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:27:46,305 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:27:46,320 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,320 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,322 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 09:27:46,323 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,325 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,327 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,352 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,353 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,353 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,354 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,354 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,358 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,360 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,360 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,361 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,362 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,363 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,364 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,366 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,367 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,368 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,373 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,414 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 09:27:46,415 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,420 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,421 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,511 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,518 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY -2017-02-18 09:27:46,522 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,523 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,524 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,524 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,525 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY -2017-02-18 09:27:46,520 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,526 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 22 len: 26 to MEMORY -2017-02-18 09:27:46,527 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. -2017-02-18 09:27:46,530 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local751599384_0001 -java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 - at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) - at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) -Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 - at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134) - at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376) - at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) - at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) - at java.util.concurrent.FutureTask.run(FutureTask.java:262) - at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) - at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) - at java.lang.Thread.run(Thread.java:745) -Caused by: java.io.IOException: not a gzip file - at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496) - at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257) - at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186) - at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91) - at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85) - at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199) - at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97) - at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157) - at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102) - at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85) -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,557 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: 
localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY -2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] -2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY -2017-02-18 09:27:46,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 failed with state FAILED due to: NA -2017-02-18 09:27:47,155 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 - File System Counters - FILE: Number of bytes read=63678066 - FILE: Number of bytes written=834451 - FILE: Number of read operations=0 - FILE: Number of large read operations=0 - FILE: Number of write operations=0 - Map-Reduce Framework - Map input records=507535 - Map output records=4678719 - Map output bytes=43638689 - Map output materialized bytes=1471 - Input split bytes=351 - Combine input records=4678719 - Combine output records=131 - Spilled Records=131 - Failed Shuffles=0 - Merged Map outputs=0 - GC time elapsed (ms)=663 - Total committed heap usage (bytes)=576008192 - File Input Format Counters - Bytes Read=26057874 -2017-02-18 09:32:52,271 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable -2017-02-18 09:32:52,676 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress -2017-02-18 09:32:52,678 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec -2017-02-18 09:32:53,961 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id -2017-02-18 09:32:53,971 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= -2017-02-18 09:32:55,424 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
-2017-02-18 09:32:55,476 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 09:32:55,773 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 09:32:56,770 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local963140535_0001
-2017-02-18 09:32:58,243 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 09:32:58,245 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local963140535_0001
-2017-02-18 09:32:58,257 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 09:32:58,292 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:32:58,293 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 09:32:58,640 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 09:32:58,641 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000000_0
-2017-02-18 09:32:58,823 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:32:58,889 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:32:58,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 09:32:59,248 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 running in uber mode : false
-2017-02-18 09:32:59,269 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 09:32:59,522 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:32:59,539 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:32:59,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:32:59,582 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:33:04,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:33:05,319 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
-2017-02-18 09:33:07,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:33:08,340 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
-2017-02-18 09:33:10,903 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:33:10,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:33:10,937 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:10,938 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 09:33:11,347 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 09:33:13,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:16,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:19,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:22,930 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:25,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:27,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480)
-2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 09:33:28,944 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:31,946 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:34,957 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:37,968 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:40,042 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
-    at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
-    at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
-    at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
-    at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
-    at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
-    at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
-    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
-    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
-    at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
-    at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
-    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
-    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
-    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-    at java.lang.Thread.run(Thread.java:745)
-2017-02-18 09:33:40,081 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000001_0
-2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:33:40,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 09:33:40,579 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:33:40,589 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:33:40,592 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:33:40,594 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:33:40,595 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:33:40,602 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:33:40,611 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:33:40,971 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:43,769 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 09:33:43,979 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:46,116 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:46,528 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0%
-2017-02-18 09:33:47,986 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:47,992 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480)
-2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 09:33:49,120 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:51,563 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@e5c48f1
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
-2017-02-18 09:33:51,577 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000002_0
-2017-02-18 09:33:51,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:33:51,595 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:33:51,599 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 09:33:51,954 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:33:51,973 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:33:51,974 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:33:52,131 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:54,795 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 09:33:55,134 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:57,609 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:33:57,930 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 09:33:57,940 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:33:57,945 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 23042072(92168288)
-2017-02-18 09:33:57,946 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 09:34:00,613 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:34:00,657 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@2d14b355
-java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
-2017-02-18 09:34:00,664 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
-2017-02-18 09:34:00,673 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local963140535_0001
-java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
-    at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
-    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
-Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
-    at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
-    at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
-    at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
-    at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
-    at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
-    at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
-    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
-    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
-    at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
-    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
-    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
-    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
-    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
-    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
-    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
-    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
-    at java.lang.Thread.run(Thread.java:745)
-2017-02-18 09:34:00,943 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 failed with state FAILED due to: NA
-2017-02-18 09:34:01,055 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
-    File System Counters
-        FILE: Number of bytes read=73722745
-        FILE: Number of bytes written=829530
-        FILE: Number of read operations=0
-        FILE: Number of large read operations=0
-        FILE: Number of write operations=0
-    Map-Reduce Framework
-        Map input records=507535
-        Map output records=4678719
-        Map output bytes=43638689
-        Map output materialized bytes=0
-        Input split bytes=351
-        Combine input records=0
-        Combine output records=0
-        Spilled Records=0
-        Failed Shuffles=0
-        Merged Map outputs=0
-        GC time elapsed (ms)=1944
-        Total committed heap usage (bytes)=1413292032
-    File Input Format Counters
-        Bytes Read=26057874
-2017-02-18 09:36:42,891 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 09:36:43,290 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
-2017-02-18 09:36:43,295 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
-2017-02-18 09:36:44,625 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 09:36:44,637 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 09:36:46,271 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
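All three map attempts above, and then the job itself, die in sortAndSpill because SnappyCodec requires native libhadoop support that this VM lacks (the NativeCodeLoader warning at the top of each run says as much); the run that follows switches to a pure-Java codec and gets a working compressor ("Got brand-new compressor [.bz2]"). A hedged sketch of guarding the codec choice on native availability; the helper class and method names are hypothetical, while checkNativeCodeLoaded() is the same call that throws in the traces above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.SnappyCodec;

public class CodecFallbackSketch {
    // Hypothetical helper (not from the log): prefer Snappy when the native
    // library is loaded, otherwise fall back to BZip2Codec, which is pure
    // Java and is what the successful 09:36 run below ends up using.
    static Class<? extends CompressionCodec> pickMapOutputCodec() {
        try {
            SnappyCodec.checkNativeCodeLoaded(); // throws RuntimeException without native snappy
            return SnappyCodec.class;
        } catch (RuntimeException noNativeSnappy) {
            return BZip2Codec.class;
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean("mapreduce.map.output.compress", true);
        conf.setClass("mapreduce.map.output.compress.codec",
                pickMapOutputCodec(), CompressionCodec.class);
        System.out.println("map-output codec: " + pickMapOutputCodec().getName());
    }
}

The trade-off is speed: bz2 compresses well but is much slower than snappy, which only matters here insofar as the local job finishes at all.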
-2017-02-18 09:36:46,307 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 09:36:46,597 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 09:36:47,633 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2056867727_0001
-2017-02-18 09:36:49,105 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 09:36:49,107 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2056867727_0001
-2017-02-18 09:36:49,118 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 09:36:49,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:36:49,169 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 09:36:49,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 09:36:49,483 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000000_0
-2017-02-18 09:36:49,685 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:36:49,784 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:36:49,807 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 09:36:50,167 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 running in uber mode : false
-2017-02-18 09:36:50,176 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 09:36:50,435 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:36:50,458 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:36:50,459 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:36:50,487 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:36:50,528 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:36:55,778 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:36:56,207 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
-2017-02-18 09:36:58,787 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:36:59,218 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
-2017-02-18 09:37:01,669 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 09:37:01,789 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:02,230 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 09:37:04,793 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:07,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:10,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:13,808 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:16,815 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:17,664 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.bz2]
-2017-02-18 09:37:19,819 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:22,831 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:23,735 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:37:23,769 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 09:37:23,775 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000000_0' done.
-2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000000_0
-2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000001_0
-2017-02-18 09:37:23,787 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:37:23,789 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:37:23,793 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 09:37:24,086 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:37:24,101 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:37:24,115 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:37:24,384 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:37:27,137 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:37:27,148 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 09:37:27,402 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 09:37:29,799 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:30,413 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 09:37:32,802 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:33,153 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:37:33,163 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 09:37:33,168 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:37:33,171 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000001_0' done.
-2017-02-18 09:37:33,172 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000001_0
-2017-02-18 09:37:33,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000002_0
-2017-02-18 09:37:33,179 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:37:33,180 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:37:33,187 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 09:37:33,424 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:37:33,475 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:37:33,481 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:37:33,487 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:37:33,497 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:37:33,499 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:37:35,611 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:37:35,620 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:37:35,621 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 09:37:36,442 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 09:37:39,198 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:37:39,462 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 09:37:40,255 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:37:40,263 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 09:37:40,269 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:37:40,272 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000002_0' done.
-2017-02-18 09:37:40,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000002_0
-2017-02-18 09:37:40,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
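For reference, the driver shape this successful run implies (three Gutenberg input files read as separate splits, ten reduce tasks, a local output_Q1.iii directory) could be sketched as below; the class name is a placeholder and the mapper/reducer are omitted, since the actual sources are not part of this diff:

// Illustrative driver consistent with the log above; names are hypothetical.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class DriverSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance();
        // Setting the jar by class would avoid the "No job jar file set" warning seen above.
        job.setJarByClass(DriverSketch.class);
        FileInputFormat.addInputPath(job, new Path("pg3200.txt"));
        FileInputFormat.addInputPath(job, new Path("pg100.txt"));
        FileInputFormat.addInputPath(job, new Path("pg31100.txt"));
        FileOutputFormat.setOutputPath(job, new Path("output_Q1.iii"));
        job.setNumReduceTasks(10); // matches reduce attempts r_000000 .. r_000009
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}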
-2017-02-18 09:37:40,342 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks -2017-02-18 09:37:40,343 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000000_0 -2017-02-18 09:37:40,390 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:40,391 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:40,405 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7018394a -2017-02-18 09:37:40,468 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% -2017-02-18 09:37:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:40,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2] -2017-02-18 09:37:40,703 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 61 len: 103 to MEMORY -2017-02-18 09:37:40,740 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:40,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61 -2017-02-18 09:37:40,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 30 len: 78 to MEMORY -2017-02-18 09:37:40,785 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:40,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->91 -2017-02-18 09:37:40,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 82 to MEMORY -2017-02-18 09:37:40,800 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:40,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 91, usedMemory ->122 -2017-02-18 09:37:40,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:40,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:40,819 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:40,851 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:40,865 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes -2017-02-18 09:37:40,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 153 bytes from disk -2017-02-18 09:37:40,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:40,931 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:40,935 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes -2017-02-18 09:37:40,938 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:40,984 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords -2017-02-18 09:37:40,987 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000000_0 is done. And is in the process of committing -2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000000_0 is allowed to commit now -2017-02-18 09:37:40,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000000 -2017-02-18 09:37:41,004 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:41,008 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000000_0' done. 
-2017-02-18 09:37:41,009 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000000_0 -2017-02-18 09:37:41,010 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000001_0 -2017-02-18 09:37:41,018 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:41,019 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:41,020 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@21fdb35f -2017-02-18 09:37:41,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:41,048 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:41,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 154 len: 171 to MEMORY -2017-02-18 09:37:41,065 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:41,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154 -2017-02-18 09:37:41,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 69 len: 110 to MEMORY -2017-02-18 09:37:41,084 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:41,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->223 -2017-02-18 09:37:41,094 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 39 len: 85 to MEMORY -2017-02-18 09:37:41,100 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:41,114 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 3, commitMemory -> 223, usedMemory ->262 -2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:41,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes -2017-02-18 09:37:41,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:41,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 238 bytes from disk -2017-02-18 09:37:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:41,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:41,155 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes -2017-02-18 09:37:41,159 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,201 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000001_0 is done. And is in the process of committing -2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000001_0 is allowed to commit now -2017-02-18 09:37:41,208 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000001 -2017-02-18 09:37:41,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000001_0' done. 
-2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000001_0 -2017-02-18 09:37:41,217 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000002_0 -2017-02-18 09:37:41,237 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@16f5d08e -2017-02-18 09:37:41,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:41,259 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:41,268 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 112 len: 146 to MEMORY -2017-02-18 09:37:41,277 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:41,286 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112 -2017-02-18 09:37:41,290 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 41 len: 81 to MEMORY -2017-02-18 09:37:41,299 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->153 -2017-02-18 09:37:41,306 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 51 len: 94 to MEMORY -2017-02-18 09:37:41,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:41,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 3, commitMemory -> 153, usedMemory ->204 -2017-02-18 09:37:41,319 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:41,321 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:41,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes -2017-02-18 09:37:41,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:41,374 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 205 bytes from disk -2017-02-18 09:37:41,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:41,378 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:41,380 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes -2017-02-18 09:37:41,385 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,423 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000002_0 is done. And is in the process of committing -2017-02-18 09:37:41,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000002_0 is allowed to commit now -2017-02-18 09:37:41,439 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000002 -2017-02-18 09:37:41,450 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:41,454 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000002_0' done. 
-2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000002_0 -2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000003_0 -2017-02-18 09:37:41,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 09:37:41,476 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fae5c75 -2017-02-18 09:37:41,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:41,502 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:41,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 63 len: 105 to MEMORY -2017-02-18 09:37:41,515 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:41,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63 -2017-02-18 09:37:41,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 57 to MEMORY -2017-02-18 09:37:41,531 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:41,537 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74 -2017-02-18 09:37:41,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 11 len: 59 to MEMORY -2017-02-18 09:37:41,565 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85 -2017-02-18 09:37:41,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:41,576 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:41,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes -2017-02-18 09:37:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:41,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk -2017-02-18 09:37:41,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:41,612 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:41,616 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes -2017-02-18 09:37:41,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,670 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000003_0 is done. And is in the process of committing -2017-02-18 09:37:41,672 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:41,673 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000003_0 is allowed to commit now -2017-02-18 09:37:41,678 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000003 -2017-02-18 09:37:41,683 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:41,685 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000003_0' done. 
-2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000003_0 -2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000004_0 -2017-02-18 09:37:41,695 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2f5fea9c -2017-02-18 09:37:41,711 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:41,723 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:41,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 52 len: 101 to MEMORY -2017-02-18 09:37:41,753 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:41,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52 -2017-02-18 09:37:41,760 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 33 len: 82 to MEMORY -2017-02-18 09:37:41,776 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:41,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->85 -2017-02-18 09:37:41,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 79 to MEMORY -2017-02-18 09:37:41,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 85, usedMemory ->116 -2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes -2017-02-18 09:37:41,951 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:41,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 147 bytes from disk -2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:41,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes -2017-02-18 09:37:41,976 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,014 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000004_0 is done. And is in the process of committing -2017-02-18 09:37:42,017 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,029 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000004_0 is allowed to commit now -2017-02-18 09:37:42,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000004 -2017-02-18 09:37:42,040 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000004_0' done. 
-2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000004_0 -2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000005_0 -2017-02-18 09:37:42,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:42,060 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:42,068 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70b91162 -2017-02-18 09:37:42,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:42,107 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:42,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 54 len: 100 to MEMORY -2017-02-18 09:37:42,128 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:42,143 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54 -2017-02-18 09:37:42,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 23 len: 69 to MEMORY -2017-02-18 09:37:42,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77 -2017-02-18 09:37:42,188 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 23 len: 68 to MEMORY -2017-02-18 09:37:42,194 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:42,195 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100 -2017-02-18 09:37:42,203 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes -2017-02-18 09:37:42,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:42,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 130 bytes from disk -2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:42,249 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes -2017-02-18 09:37:42,266 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,307 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000005_0 is done. And is in the process of committing -2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000005_0 is allowed to commit now -2017-02-18 09:37:42,314 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000005 -2017-02-18 09:37:42,326 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:42,349 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000005_0' done. 
-2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000005_0 -2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000006_0 -2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:42,359 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3d144d62 -2017-02-18 09:37:42,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:42,387 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:42,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 72 len: 119 to MEMORY -2017-02-18 09:37:42,415 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:42,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 -2017-02-18 09:37:42,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 28 len: 75 to MEMORY -2017-02-18 09:37:42,429 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:42,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 -2017-02-18 09:37:42,445 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 28 len: 74 to MEMORY -2017-02-18 09:37:42,460 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:42,466 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 -2017-02-18 09:37:42,467 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:42,483 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% -2017-02-18 09:37:42,532 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:42,533 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes -2017-02-18 09:37:42,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 155 bytes from disk -2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:42,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes -2017-02-18 09:37:42,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,656 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000006_0 is done. And is in the process of committing -2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000006_0 is allowed to commit now -2017-02-18 09:37:42,659 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000006 -2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000006_0' done. 
-2017-02-18 09:37:42,674 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000006_0 -2017-02-18 09:37:42,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000007_0 -2017-02-18 09:37:42,691 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:42,692 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:42,693 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a82b79d -2017-02-18 09:37:42,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:42,726 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:42,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 88 len: 120 to MEMORY -2017-02-18 09:37:42,750 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:42,757 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 -2017-02-18 09:37:42,769 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 55 to MEMORY -2017-02-18 09:37:42,771 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:42,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->99 -2017-02-18 09:37:42,797 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:37:42,812 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:42,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 99, usedMemory ->101 -2017-02-18 09:37:42,824 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:42,826 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:42,827 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes -2017-02-18 09:37:42,858 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 140 bytes from disk -2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:42,878 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:42,879 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes -2017-02-18 09:37:42,888 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,929 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000007_0 is done. And is in the process of committing -2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000007_0 is allowed to commit now -2017-02-18 09:37:42,934 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000007 -2017-02-18 09:37:42,963 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000007_0' done. 
-2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000007_0 -2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000008_0 -2017-02-18 09:37:42,972 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@132393b4 -2017-02-18 09:37:42,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:42,996 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:43,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 98 len: 134 to MEMORY -2017-02-18 09:37:43,014 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:43,024 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 -2017-02-18 09:37:43,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 49 len: 92 to MEMORY -2017-02-18 09:37:43,043 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:43,044 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 -2017-02-18 09:37:43,054 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 49 len: 97 to MEMORY -2017-02-18 09:37:43,067 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:43,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 -2017-02-18 09:37:43,082 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes -2017-02-18 09:37:43,101 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit -2017-02-18 09:37:43,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk -2017-02-18 09:37:43,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:37:43,137 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:37:43,140 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes -2017-02-18 09:37:43,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:43,191 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000008_0 is done. And is in the process of committing -2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000008_0 is allowed to commit now -2017-02-18 09:37:43,197 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000008 -2017-02-18 09:37:43,205 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:37:43,207 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000008_0' done. 
-2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000008_0 -2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000009_0 -2017-02-18 09:37:43,232 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@54930708 -2017-02-18 09:37:43,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:37:43,270 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:37:43,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 22 len: 69 to MEMORY -2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2056867727_0001_m_000000_0 -2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 -2017-02-18 09:37:43,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 13 len: 58 to MEMORY -2017-02-18 09:37:43,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2056867727_0001_m_000001_0 -2017-02-18 09:37:43,338 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->35 -2017-02-18 09:37:43,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:37:43,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 -2017-02-18 09:37:43,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37 -2017-02-18 09:37:43,368 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:37:43,371 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:37:43,372 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes
-2017-02-18 09:37:43,379 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:37:43,412 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk
-2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:37:43,417 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes
-2017-02-18 09:37:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:37:43,468 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000009_0 is allowed to commit now
-2017-02-18 09:37:43,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000009
-2017-02-18 09:37:43,484 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000009_0' done.
-2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000009_0
-2017-02-18 09:37:43,488 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 09:37:43,493 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 09:37:43,633 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2056867727_0001
+2017-02-19 02:34:50,772 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 02:34:53,857 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-19 02:34:53,891 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-19 02:34:54,187 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/bpa/Assign1/output_Q2 already exists
+2017-02-19 02:35:58,581 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 02:36:02,113 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-19 02:36:02,152 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-19 02:36:04,722 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-19 02:36:04,880 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-19 02:36:05,691 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-19 02:36:08,773 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2101611055_0001
+2017-02-19 02:36:11,894 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-19 02:36:11,896 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2101611055_0001
+2017-02-19 02:36:11,937 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-19 02:36:12,058 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:36:12,077 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-19 02:36:12,909 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-19 02:36:12,911 INFO org.apache.hadoop.mapreduce.Job: Job job_local2101611055_0001 running in uber mode : false
+2017-02-19 02:36:12,910 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2101611055_0001_m_000000_0
+2017-02-19 02:36:12,917 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-19 02:36:13,386 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:36:13,593 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:36:13,615 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-19 02:36:14,991 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:36:14,992 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:36:14,992 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:36:14,992 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:36:14,993 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:36:15,044 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:36:15,116 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:36:19,508 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:22,512 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:25,515 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:26,322 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
+2017-02-19 02:36:28,517 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:31,518 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:32,333 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-19 02:36:34,522 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:35,338 INFO org.apache.hadoop.mapreduce.Job: map 4% reduce 0%
+2017-02-19 02:36:37,523 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:38,342 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
+2017-02-19 02:36:40,527 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:41,348 INFO org.apache.hadoop.mapreduce.Job: map 7% reduce 0%
+2017-02-19 02:36:43,529 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:44,350 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
+2017-02-19 02:36:46,546 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:47,353 INFO org.apache.hadoop.mapreduce.Job: map 10% reduce 0%
+2017-02-19 02:36:49,547 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:50,360 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0%
+2017-02-19 02:36:52,548 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:36:53,368 INFO org.apache.hadoop.mapreduce.Job: map 12% reduce 0%
+2017-02-19 02:37:27,675 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 02:37:31,166 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-19 02:37:31,167 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-19 02:37:33,297 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-19 02:37:33,426 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-19 02:37:33,976 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-19 02:37:35,755 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1344261413_0001
+2017-02-19 02:37:37,731 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-19 02:37:37,739 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1344261413_0001
+2017-02-19 02:37:37,756 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-19 02:37:37,817 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:37:37,827 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-19 02:37:38,291 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-19 02:37:38,292 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1344261413_0001_m_000000_0
+2017-02-19 02:37:38,585 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:37:38,702 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:37:38,730 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-19 02:37:38,743 INFO org.apache.hadoop.mapreduce.Job: Job job_local1344261413_0001 running in uber mode : false
+2017-02-19 02:37:38,745 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-19 02:37:39,652 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:37:39,653 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:37:39,653 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:37:39,653 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:37:39,653 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:37:39,715 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:37:39,774 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:37:44,673 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:47,687 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:47,766 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
+2017-02-19 02:37:50,689 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:53,691 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:53,779 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-19 02:37:56,693 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:56,784 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
+2017-02-19 02:37:59,695 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:37:59,788 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
+2017-02-19 02:38:02,696 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:02,791 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
+2017-02-19 02:38:05,699 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:05,798 INFO org.apache.hadoop.mapreduce.Job: map 10% reduce 0%
+2017-02-19 02:38:08,700 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:08,803 INFO org.apache.hadoop.mapreduce.Job: map 12% reduce 0%
+2017-02-19 02:38:11,702 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:11,808 INFO org.apache.hadoop.mapreduce.Job: map 13% reduce 0%
+2017-02-19 02:38:14,705 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:14,816 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-19 02:38:17,707 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:17,826 INFO org.apache.hadoop.mapreduce.Job: map 17% reduce 0%
+2017-02-19 02:38:20,709 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:20,830 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
+2017-02-19 02:38:23,710 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:23,886 INFO org.apache.hadoop.mapreduce.Job: map 20% reduce 0%
+2017-02-19 02:38:26,712 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:26,892 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-19 02:38:28,141 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:28,142 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:38:28,142 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:38:28,142 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5624480; bufvoid = 104857600
+2017-02-19 02:38:28,142 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25005312(100021248); length = 1209085/6553600
+2017-02-19 02:38:29,717 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:38:32,720 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-19 02:38:34,142 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
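
The 02:34:54 submission above aborted immediately with FileAlreadyExistsException: FileOutputFormat refuses to write into an existing directory, so the output_Q2 left by an earlier run had to be removed before the 02:36 resubmission could start its map tasks. A driver can do that cleanup itself with the standard FileSystem API; a minimal sketch, with a hypothetical helper name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class OutputDirCleanup {                  // hypothetical helper
        /** Recursively delete a stale output directory so FileOutputFormat can recreate it. */
        public static void deleteIfExists(Configuration conf, String dir) throws Exception {
            Path out = new Path(dir);
            FileSystem fs = out.getFileSystem(conf);
            if (fs.exists(out)) {
                fs.delete(out, true);                // true = recursive delete
            }
        }
    }

Calling OutputDirCleanup.deleteIfExists(job.getConfiguration(), "output_Q2") before job.waitForCompletion(true) would make re-runs idempotent.
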
+2017-02-19 02:38:34,172 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1344261413_0001_m_000000_0 is done. And is in the process of committing
+2017-02-19 02:38:34,181 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-19 02:38:34,189 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1344261413_0001_m_000000_0' done.
+2017-02-19 02:38:34,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1344261413_0001_m_000000_0
+2017-02-19 02:38:34,194 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1344261413_0001_m_000001_0
+2017-02-19 02:38:34,210 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:38:34,211 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:38:34,226 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-19 02:38:34,698 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:38:34,699 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:38:34,700 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:38:34,700 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:38:34,701 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:38:34,713 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:38:34,758 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:38:34,907 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:38:40,215 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:40,913 INFO org.apache.hadoop.mapreduce.Job: map 41% reduce 0%
+2017-02-19 02:38:43,219 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:43,915 INFO org.apache.hadoop.mapreduce.Job: map 45% reduce 0%
+2017-02-19 02:38:46,221 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:46,920 INFO org.apache.hadoop.mapreduce.Job: map 50% reduce 0%
+2017-02-19 02:38:49,226 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:49,925 INFO org.apache.hadoop.mapreduce.Job: map 54% reduce 0%
+2017-02-19 02:38:50,382 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:50,386 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:38:50,388 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:38:50,389 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 2143719; bufvoid = 104857600
+2017-02-19 02:38:50,391 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715252(102861008); length = 499145/6553600
+2017-02-19 02:38:52,077 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-19 02:38:52,092 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1344261413_0001_m_000001_0 is done. And is in the process of committing
+2017-02-19 02:38:52,111 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-19 02:38:52,114 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1344261413_0001_m_000001_0' done.
+2017-02-19 02:38:52,116 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1344261413_0001_m_000001_0
+2017-02-19 02:38:52,117 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1344261413_0001_m_000002_0
+2017-02-19 02:38:52,126 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:38:52,127 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:38:52,135 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-19 02:38:52,600 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-19 02:38:52,610 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-19 02:38:52,612 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-19 02:38:52,614 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-19 02:38:52,615 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-19 02:38:52,624 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-19 02:38:52,633 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-19 02:38:52,928 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:38:58,142 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:38:58,941 INFO org.apache.hadoop.mapreduce.Job: map 79% reduce 0%
+2017-02-19 02:39:01,145 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:39:01,967 INFO org.apache.hadoop.mapreduce.Job: map 85% reduce 0%
+2017-02-19 02:39:02,945 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-19 02:39:02,948 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-19 02:39:02,950 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-19 02:39:02,950 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 1596794; bufvoid = 104857600
+2017-02-19 02:39:02,951 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25892496(103569984); length = 321901/6553600
+2017-02-19 02:39:04,074 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-19 02:39:04,079 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1344261413_0001_m_000002_0 is done. And is in the process of committing
+2017-02-19 02:39:04,101 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-19 02:39:04,107 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1344261413_0001_m_000002_0' done.
+2017-02-19 02:39:04,111 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1344261413_0001_m_000002_0
+2017-02-19 02:39:04,112 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-19 02:39:04,153 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-19 02:39:04,154 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1344261413_0001_r_000000_0
+2017-02-19 02:39:04,257 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-19 02:39:04,258 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-19 02:39:04,301 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5fa2ac60
+2017-02-19 02:39:04,505 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-19 02:39:04,532 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1344261413_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-19 02:39:04,971 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-19 02:39:05,029 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1344261413_0001_m_000002_0 decomp: 1757748 len: 1757752 to MEMORY
+2017-02-19 02:39:05,171 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1757748 bytes from map-output for attempt_local1344261413_0001_m_000002_0
+2017-02-19 02:39:05,249 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1757748, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1757748
+2017-02-19 02:39:05,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1344261413_0001_m_000000_0 decomp: 6229026 len: 6229030 to MEMORY
+2017-02-19 02:39:05,583 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 6229026 bytes from map-output for attempt_local1344261413_0001_m_000000_0
+2017-02-19 02:39:05,612 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 6229026, inMemoryMapOutputs.size() -> 2, commitMemory -> 1757748, usedMemory ->7986774
+2017-02-19 02:39:05,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1344261413_0001_m_000001_0 decomp: 2393295 len: 2393299 to MEMORY
+2017-02-19 02:39:05,698 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2393295 bytes from map-output for attempt_local1344261413_0001_m_000001_0
+2017-02-19 02:39:05,699 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2393295, inMemoryMapOutputs.size() -> 3, commitMemory -> 7986774, usedMemory ->10380069
+2017-02-19 02:39:05,699 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-19 02:39:05,700 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
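
The MergerManager line above is the shuffle's view of its budget: memoryLimit=679778688 is roughly 70% of the task heap, mergeThreshold=448653952 is 66% of that limit, and ioSortFactor=10 caps how many segments a single merge pass takes. These derive from standard Hadoop 2.x properties; a sketch showing the knobs with the default values the log reflects (the class name is hypothetical):

    import org.apache.hadoop.conf.Configuration;

    public class ShuffleTuning {                     // hypothetical helper
        public static Configuration tuned() {
            Configuration conf = new Configuration();
            conf.setInt("mapreduce.task.io.sort.factor", 10);                      // -> ioSortFactor
            conf.setFloat("mapreduce.reduce.shuffle.input.buffer.percent", 0.70f); // -> memoryLimit
            conf.setFloat("mapreduce.reduce.shuffle.merge.percent", 0.66f);        // -> mergeThreshold
            return conf;
        }
    }
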
+2017-02-19 02:39:05,707 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-19 02:39:05,846 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-19 02:39:05,849 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 10380058 bytes
+2017-02-19 02:39:09,888 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 10380069 bytes to disk to satisfy reduce memory limit
+2017-02-19 02:39:09,904 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 10380069 bytes from disk
+2017-02-19 02:39:09,920 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-19 02:39:09,920 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-19 02:39:09,921 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 10380062 bytes
+2017-02-19 02:39:09,932 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-19 02:39:10,102 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-19 02:39:10,288 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:39:10,975 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 67%
+2017-02-19 02:39:13,292 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:39:13,982 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 69%
+2017-02-19 02:39:16,313 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:39:16,986 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-19 02:39:17,442 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1344261413_0001_r_000000_0 is done. And is in the process of committing
+2017-02-19 02:39:17,465 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:39:17,473 INFO org.apache.hadoop.mapred.Task: Task attempt_local1344261413_0001_r_000000_0 is allowed to commit now
+2017-02-19 02:39:17,487 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1344261413_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2/_temporary/0/task_local1344261413_0001_r_000000
+2017-02-19 02:39:17,492 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 02:39:17,504 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1344261413_0001_r_000000_0' done.
+2017-02-19 02:39:17,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1344261413_0001_r_000000_0
+2017-02-19 02:39:17,505 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-19 02:39:17,683 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1344261413_0001
 java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
 Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
@@ -8577,32 +473,33 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http
 	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
 	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
 	... 1 more
-2017-02-18 09:37:44,501 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 failed with state FAILED due to: NA
-2017-02-18 09:37:44,719 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+2017-02-19 02:39:17,987 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-19 02:39:17,987 INFO org.apache.hadoop.mapreduce.Job: Job job_local1344261413_0001 failed with state FAILED due to: NA
+2017-02-19 02:39:18,244 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
 	File System Counters
-		FILE: Number of bytes read=324416757
-		FILE: Number of bytes written=3669220
+		FILE: Number of bytes read=110497367
+		FILE: Number of bytes written=49152988
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
 		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=2645
+		Map output records=507535
+		Map output bytes=9364993
+		Map output materialized bytes=10380081
 		Input split bytes=351
-		Combine input records=4678719
-		Combine output records=131
-		Reduce input groups=77
-		Reduce shuffle bytes=2645
-		Reduce input records=131
-		Reduce output records=77
-		Spilled Records=262
-		Shuffled Maps =30
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=87026
+		Reduce shuffle bytes=10380081
+		Reduce input records=507535
+		Reduce output records=87026
+		Spilled Records=1015070
+		Shuffled Maps =3
		Failed Shuffles=0
-		Merged Map outputs=30
-		GC time elapsed (ms)=709
-		Total committed heap usage (bytes)=2551959552
+		Merged Map outputs=3
+		GC time elapsed (ms)=1104
+		Total committed heap usage (bytes)=773603328
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -8613,1774 +510,156 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http
 	File Input Format Counters
 		Bytes Read=26057874
 	File Output Format Counters
-		Bytes Written=862
-2017-02-18 09:48:17,921 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-2017-02-18 09:48:18,238 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
-2017-02-18 09:48:18,273 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
-2017-02-18 09:48:19,566 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
-2017-02-18 09:48:19,568 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
-2017-02-18 09:48:21,035 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
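
Both the removed and the added runs fail the same way: the job reaches map 100% reduce 100%, then LocalJobRunner's job thread dies with NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest and the job is marked FAILED. That class lives in Apache HttpClient, so the jar is missing from the runtime classpath, and the earlier "No job jar file set" warning points at the related packaging gap. A sketch of the driver-side half of the fix; the driver class name is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class Q2Driver {                          // hypothetical driver name
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Q2");
            job.setJarByClass(Q2Driver.class);       // silences "No job jar file set"
            // ... set mapper/reducer classes and input/output paths, then:
            // System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }

The HttpUriRequest class itself comes from httpclient-*.jar (plus httpcore), which must additionally be on the JVM classpath when running under LocalJobRunner, e.g. via HADOOP_CLASSPATH or the Eclipse build path.
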
-2017-02-18 09:48:21,098 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
-2017-02-18 09:48:21,373 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
-2017-02-18 09:48:22,374 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1938545376_0001
-2017-02-18 09:48:23,753 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
-2017-02-18 09:48:23,754 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1938545376_0001
-2017-02-18 09:48:23,764 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
-2017-02-18 09:48:23,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:48:23,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
-2017-02-18 09:48:24,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
-2017-02-18 09:48:24,127 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:48:24,295 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:48:24,379 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:48:24,385 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
-2017-02-18 09:48:24,758 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 running in uber mode : false
-2017-02-18 09:48:24,761 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
-2017-02-18 09:48:25,010 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:48:25,031 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:48:25,032 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:48:25,066 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:48:25,088 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:48:30,384 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:48:30,850 INFO org.apache.hadoop.mapreduce.Job: map 7% reduce 0%
-2017-02-18 09:48:33,397 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:48:33,876 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
-2017-02-18 09:48:36,121 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
-2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
-2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
-2017-02-18 09:48:36,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:36,888 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
-2017-02-18 09:48:39,417 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:42,419 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:45,426 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:48,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.bz2]
-2017-02-18 09:48:48,430 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:51,436 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:48:54,810 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:48:54,835 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000000_0 is done. And is in the process of committing
-2017-02-18 09:48:54,842 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:48:54,846 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000000_0' done.
-2017-02-18 09:48:54,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:48:54,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:48:54,857 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:48:54,858 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:48:54,865 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
-2017-02-18 09:48:55,055 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:48:55,162 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:48:55,164 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:48:55,172 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:48:55,185 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:48:58,333 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
-2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
-2017-02-18 09:48:59,085 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
-2017-02-18 09:49:00,871 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:49:01,095 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
-2017-02-18 09:49:03,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:49:04,061 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:49:04,076 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000001_0 is done. And is in the process of committing
-2017-02-18 09:49:04,081 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000001_0' done.
-2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:04,086 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:04,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:04,094 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:04,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
-2017-02-18 09:49:04,290 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:49:04,384 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
-2017-02-18 09:49:04,405 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
-2017-02-18 09:49:04,406 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
-2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
-2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
-2017-02-18 09:49:04,414 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
-2017-02-18 09:49:04,416 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
-2017-02-18 09:49:06,527 INFO org.apache.hadoop.mapred.LocalJobRunner:
-2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
-2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
-2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
-2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
-2017-02-18 09:49:07,315 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
-2017-02-18 09:49:10,103 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
-2017-02-18 09:49:10,335 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
-2017-02-18 09:49:11,103 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
-2017-02-18 09:49:11,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000002_0 is done. And is in the process of committing
-2017-02-18 09:49:11,123 INFO org.apache.hadoop.mapred.LocalJobRunner: map
-2017-02-18 09:49:11,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000002_0' done.
-2017-02-18 09:49:11,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:11,126 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
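
The removed 09:48 run differs from the others in one respect: it compressed map outputs (note the mapred.compress.map.output deprecation notices above and the "Got brand-new compressor [.bz2]" line), which is why in the shuffle below the on-the-wire len exceeds the decomp size for such tiny segments; the bz2 header and checksum overhead dominate. A sketch of requesting this with the non-deprecated property names the log's own deprecation hints point to (the helper class name is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.BZip2Codec;
    import org.apache.hadoop.io.compress.CompressionCodec;

    public class MapOutputCompression {              // hypothetical helper
        public static void enableBzip2(Configuration conf) {
            conf.setBoolean("mapreduce.map.output.compress", true);
            conf.setClass("mapreduce.map.output.compress.codec",
                          BZip2Codec.class, CompressionCodec.class);
        }
    }
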
-2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
-2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000000_0
-2017-02-18 09:49:11,269 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:11,270 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:11,288 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@639e075e
-2017-02-18 09:49:11,337 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
-2017-02-18 09:49:11,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:11,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:11,571 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2]
-2017-02-18 09:49:11,575 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 70 to MEMORY
-2017-02-18 09:49:11,608 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:11,619 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
-2017-02-18 09:49:11,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:11,642 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:11,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
-2017-02-18 09:49:11,651 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:11,658 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:11,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
-2017-02-18 09:49:11,664 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:11,692 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:11,701 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
-2017-02-18 09:49:11,728 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:11,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 95 bytes from disk
-2017-02-18 09:49:11,738 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:11,741 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:11,746 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
-2017-02-18 09:49:11,748 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:11,796 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
-2017-02-18 09:49:11,799 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000000_0 is done. And is in the process of committing
-2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000000_0 is allowed to commit now
-2017-02-18 09:49:11,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000000
-2017-02-18 09:49:11,811 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:11,813 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000000_0' done.
-2017-02-18 09:49:11,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000000_0
-2017-02-18 09:49:11,821 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000001_0
-2017-02-18 09:49:11,832 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:11,833 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:11,834 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@362f58d7
-2017-02-18 09:49:11,840 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:11,854 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:11,863 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 29 len: 79 to MEMORY
-2017-02-18 09:49:11,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 29 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:11,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 29, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->29
-2017-02-18 09:49:11,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 9 len: 53 to MEMORY
-2017-02-18 09:49:11,897 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:11,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 2, commitMemory -> 29, usedMemory ->38
-2017-02-18 09:49:11,905 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 9 len: 49 to MEMORY
-2017-02-18 09:49:11,919 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:11,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 3, commitMemory -> 38, usedMemory ->47
-2017-02-18 09:49:11,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:11,929 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:11,930 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
-2017-02-18 09:49:12,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:12,007 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
-2017-02-18 09:49:12,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:12,010 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:12,011 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 40 bytes
-2017-02-18 09:49:12,016 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,051 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000001_0 is done. And is in the process of committing
-2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000001_0 is allowed to commit now
-2017-02-18 09:49:12,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000001
-2017-02-18 09:49:12,083 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:12,096 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000001_0' done.
-2017-02-18 09:49:12,097 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000001_0
-2017-02-18 09:49:12,098 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000002_0
-2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:12,109 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6b121f65
-2017-02-18 09:49:12,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:12,134 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:12,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:12,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:12,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:12,168 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:12,176 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:12,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:12,183 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:12,191 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:12,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:12,198 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:12,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:12,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:12,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:12,245 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:12,246 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:12,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,295 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000002_0 is done. And is in the process of committing
-2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000002_0 is allowed to commit now
-2017-02-18 09:49:12,299 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000002
-2017-02-18 09:49:12,304 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000002_0' done.
-2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000002_0
-2017-02-18 09:49:12,308 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000003_0
-2017-02-18 09:49:12,318 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@533f785b
-2017-02-18 09:49:12,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:12,344 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 09:49:12,350 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:12,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY
-2017-02-18 09:49:12,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:12,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
-2017-02-18 09:49:12,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:12,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:12,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
-2017-02-18 09:49:12,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:12,412 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:12,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
-2017-02-18 09:49:12,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:12,424 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:12,425 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:12,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:12,477 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk
-2017-02-18 09:49:12,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:12,480 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:12,492 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:12,501 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,547 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000003_0 is done. And is in the process of committing
-2017-02-18 09:49:12,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,550 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000003_0 is allowed to commit now
-2017-02-18 09:49:12,552 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000003
-2017-02-18 09:49:12,557 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:12,560 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000003_0' done.
-2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000003_0
-2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000004_0
-2017-02-18 09:49:12,575 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:12,577 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:12,579 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51d934bf
-2017-02-18 09:49:12,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:12,599 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:12,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 56 to MEMORY
-2017-02-18 09:49:12,621 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:12,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
-2017-02-18 09:49:12,628 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:12,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:12,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
-2017-02-18 09:49:12,641 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:12,648 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:12,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
-2017-02-18 09:49:12,656 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:12,668 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:12,670 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
-2017-02-18 09:49:12,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:12,705 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk
-2017-02-18 09:49:12,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:12,707 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:12,708 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
-2017-02-18 09:49:12,712 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,744 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000004_0 is done. And is in the process of committing
-2017-02-18 09:49:12,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,756 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000004_0 is allowed to commit now
-2017-02-18 09:49:12,757 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000004
-2017-02-18 09:49:12,763 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:12,766 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000004_0' done.
-2017-02-18 09:49:12,767 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000004_0
-2017-02-18 09:49:12,770 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000005_0
-2017-02-18 09:49:12,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:12,778 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:12,785 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1af653a6
-2017-02-18 09:49:12,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:12,811 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:12,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 81 to MEMORY
-2017-02-18 09:49:12,826 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:12,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
-2017-02-18 09:49:12,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 23 len: 69 to MEMORY
-2017-02-18 09:49:12,850 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:12,856 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->56
-2017-02-18 09:49:12,868 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 23 len: 68 to MEMORY
-2017-02-18 09:49:12,875 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:12,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 56, usedMemory ->79
-2017-02-18 09:49:12,881 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 61 bytes
-2017-02-18 09:49:12,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 79 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:12,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk
-2017-02-18 09:49:12,927 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:12,928 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:12,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69 bytes
-2017-02-18 09:49:12,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,979 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000005_0 is done. And is in the process of committing
-2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000005_0 is allowed to commit now
-2017-02-18 09:49:12,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000005
-2017-02-18 09:49:12,987 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:12,989 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000005_0' done.
-2017-02-18 09:49:12,990 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000005_0
-2017-02-18 09:49:12,991 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000006_0
-2017-02-18 09:49:13,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@685461f1
-2017-02-18 09:49:13,014 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:13,027 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:13,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:13,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:13,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,074 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:13,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:13,079 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,085 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:13,098 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:13,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:13,150 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:13,151 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000006_0 is done. And is in the process of committing
-2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000006_0 is allowed to commit now
-2017-02-18 09:49:13,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000006
-2017-02-18 09:49:13,217 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:13,221 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000006_0' done.
-2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000006_0
-2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000007_0
-2017-02-18 09:49:13,234 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:13,235 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:13,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f132bfb
-2017-02-18 09:49:13,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:13,260 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:13,270 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 79 to MEMORY
-2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
-2017-02-18 09:49:13,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:13,314 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->35
-2017-02-18 09:49:13,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:13,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37
-2017-02-18 09:49:13,339 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:13,342 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:13,343 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
-2017-02-18 09:49:13,352 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 14%
-2017-02-18 09:49:13,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk
-2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:13,411 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
-2017-02-18 09:49:13,412 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,458 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000007_0 is done. And is in the process of committing
-2017-02-18 09:49:13,459 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,460 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000007_0 is allowed to commit now
-2017-02-18 09:49:13,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000007
-2017-02-18 09:49:13,474 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:13,478 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000007_0' done.
-2017-02-18 09:49:13,479 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000007_0
-2017-02-18 09:49:13,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000008_0
-2017-02-18 09:49:13,487 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:13,488 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:13,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@19be1bc9
-2017-02-18 09:49:13,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:13,527 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:13,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 73 to MEMORY
-2017-02-18 09:49:13,551 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:13,561 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
-2017-02-18 09:49:13,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 59 to MEMORY
-2017-02-18 09:49:13,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:13,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->35
-2017-02-18 09:49:13,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 59 to MEMORY
-2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->47
-2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 29 bytes
-2017-02-18 09:49:13,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:13,649 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk
-2017-02-18 09:49:13,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:13,651 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:13,656 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes
-2017-02-18 09:49:13,659 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,705 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000008_0 is done. And is in the process of committing
-2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000008_0 is allowed to commit now
-2017-02-18 09:49:13,708 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000008
-2017-02-18 09:49:13,715 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:13,722 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000008_0' done.
-2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000008_0
-2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000009_0
-2017-02-18 09:49:13,739 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:13,740 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:13,744 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f9623fe
-2017-02-18 09:49:13,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:13,801 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:13,812 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,816 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:13,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:13,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,832 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:13,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:13,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:13,867 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:13,875 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:13,880 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:13,890 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:13,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:13,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:13,913 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:13,924 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:13,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,976 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000009_0 is done. And is in the process of committing
-2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000009_0 is allowed to commit now
-2017-02-18 09:49:13,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000009
-2017-02-18 09:49:13,990 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:13,996 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000009_0' done.
-2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000009_0
-2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000010_0
-2017-02-18 09:49:14,014 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:14,015 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:14,025 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2926cd3a
-2017-02-18 09:49:14,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:14,046 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:14,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,061 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:14,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:14,069 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:14,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:14,098 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,106 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:14,110 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:14,112 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:14,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:14,150 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:14,153 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:14,154 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,186 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000010_0 is done. And is in the process of committing
-2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000010_0 is allowed to commit now
-2017-02-18 09:49:14,189 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000010_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000010
-2017-02-18 09:49:14,191 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:14,194 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000010_0' done.
-2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000010_0
-2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000011_0
-2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:14,203 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@38ea76e6
-2017-02-18 09:49:14,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:14,232 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:14,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 92 to MEMORY
-2017-02-18 09:49:14,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:14,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44
-2017-02-18 09:49:14,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 33 len: 81 to MEMORY
-2017-02-18 09:49:14,268 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:14,271 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->77
-2017-02-18 09:49:14,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 58 to MEMORY
-2017-02-18 09:49:14,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:14,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->90
-2017-02-18 09:49:14,303 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
-2017-02-18 09:49:14,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 90 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:14,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk
-2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:14,341 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 79 bytes
-2017-02-18 09:49:14,351 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,354 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 22%
-2017-02-18 09:49:14,388 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000011_0 is done. And is in the process of committing
-2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000011_0 is allowed to commit now
-2017-02-18 09:49:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000011_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000011
-2017-02-18 09:49:14,395 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:14,397 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000011_0' done.
-2017-02-18 09:49:14,398 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000011_0
-2017-02-18 09:49:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000012_0
-2017-02-18 09:49:14,431 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:14,432 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:14,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3038859d
-2017-02-18 09:49:14,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:14,475 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:14,491 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 80 to MEMORY
-2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32
-2017-02-18 09:49:14,531 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 65 to MEMORY
-2017-02-18 09:49:14,540 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:14,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->54
-2017-02-18 09:49:14,548 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 60 to MEMORY
-2017-02-18 09:49:14,559 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:14,564 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->66
-2017-02-18 09:49:14,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 48 bytes
-2017-02-18 09:49:14,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 66 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:14,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 110 bytes from disk
-2017-02-18 09:49:14,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:14,604 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:14,605 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 56 bytes
-2017-02-18 09:49:14,609 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000012_0 is done. And is in the process of committing
-2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000012_0 is allowed to commit now
-2017-02-18 09:49:14,651 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000012_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000012
-2017-02-18 09:49:14,656 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000012_0' done.
-2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000012_0
-2017-02-18 09:49:14,660 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000013_0
-2017-02-18 09:49:14,668 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:14,669 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:14,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1498c437
-2017-02-18 09:49:14,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:14,698 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:14,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,715 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:14,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:14,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,733 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:14,737 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:14,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,747 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:14,750 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:14,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:14,775 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:14,782 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:14,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:14,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:14,831 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:14,832 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:14,833 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:14,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,869 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000013_0 is done. And is in the process of committing
-2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000013_0 is allowed to commit now
-2017-02-18 09:49:14,871 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000013_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000013
-2017-02-18 09:49:14,873 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:14,885 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000013_0' done.
-2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000013_0
-2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000014_0
-2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:14,894 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37250eeb
-2017-02-18 09:49:14,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:14,920 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:14,934 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY
-2017-02-18 09:49:14,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:14,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
-2017-02-18 09:49:14,971 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
-2017-02-18 09:49:14,997 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:15,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
-2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:15,024 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:15,025 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:15,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:15,074 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk
-2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:15,081 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:15,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:15,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000014_0 is done. And is in the process of committing
-2017-02-18 09:49:15,118 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:15,119 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000014_0 is allowed to commit now
-2017-02-18 09:49:15,120 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000014_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000014
-2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000014_0' done.
-2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000014_0
-2017-02-18 09:49:15,137 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000015_0
-2017-02-18 09:49:15,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:15,149 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:15,150 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@42005598
-2017-02-18 09:49:15,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:15,189 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:15,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:15,203 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:15,207 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:15,210 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:15,228 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:15,239 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:15,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:15,255 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:15,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:15,263 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:15,265 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:15,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:15,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:15,325 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk -2017-02-18 09:49:15,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:15,335 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:15,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:15,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,356 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30% -2017-02-18 09:49:15,382 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000015_0 is done. And is in the process of committing -2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000015_0 is allowed to commit now -2017-02-18 09:49:15,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000015_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000015 -2017-02-18 09:49:15,388 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:15,390 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000015_0' done. 
-2017-02-18 09:49:15,391 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000015_0 -2017-02-18 09:49:15,392 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000016_0 -2017-02-18 09:49:15,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:15,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:15,424 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@562962ff -2017-02-18 09:49:15,431 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:15,445 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:15,448 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 75 to MEMORY -2017-02-18 09:49:15,458 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:15,463 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23 -2017-02-18 09:49:15,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:15,491 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:15,495 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25 -2017-02-18 09:49:15,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:15,504 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:15,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27 -2017-02-18 09:49:15,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes -2017-02-18 09:49:15,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:15,568 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 89 bytes from disk -2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:15,571 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes -2017-02-18 09:49:15,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,620 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000016_0 is done. And is in the process of committing -2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000016_0 is allowed to commit now -2017-02-18 09:49:15,626 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000016_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000016 -2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000016_0' done. 
-2017-02-18 09:49:15,636 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000016_0 -2017-02-18 09:49:15,646 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000017_0 -2017-02-18 09:49:15,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7a960b08 -2017-02-18 09:49:15,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:15,670 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:15,690 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 58 to MEMORY -2017-02-18 09:49:15,706 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:15,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13 -2017-02-18 09:49:15,713 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:15,720 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:15,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15 -2017-02-18 09:49:15,726 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17 -2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:15,747 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:15,748 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:15,758 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes -2017-02-18 09:49:15,782 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:15,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk -2017-02-18 09:49:15,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:15,798 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:15,799 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes -2017-02-18 09:49:15,803 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,828 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000017_0 is done. And is in the process of committing -2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000017_0 is allowed to commit now -2017-02-18 09:49:15,838 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000017_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000017 -2017-02-18 09:49:15,845 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:15,847 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000017_0' done. 
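Editor's note: every reduce attempt above re-initializes its shuffle with the same MergerManager budget (memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10). Those figures are derived from the reducer heap and a handful of shuffle properties; the sketch below shows the stock MRv2 knobs involved, with illustrative values that are assumed rather than taken from this job's configuration.

    import org.apache.hadoop.conf.Configuration;

    // Minimal sketch (not this repo's code) of the shuffle-merge knobs behind
    // the MergerManager log line:
    //   memoryLimit           ~= reducer heap * input.buffer.percent
    //   maxSingleShuffleLimit  = memoryLimit * memory.limit.percent
    //   mergeThreshold         = memoryLimit * merge.percent
    public class ShuffleTuningSketch {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            conf.setFloat("mapreduce.reduce.shuffle.input.buffer.percent", 0.70f);
            conf.setFloat("mapreduce.reduce.shuffle.memory.limit.percent", 0.25f);
            conf.setFloat("mapreduce.reduce.shuffle.merge.percent", 0.66f);
            conf.setInt("mapreduce.task.io.sort.factor", 10); // ioSortFactor=10 above
        }
    }

(Consistent with the log: 679778688 * 0.25 = 169944672, the maxSingleShuffleLimit shown.)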
-2017-02-18 09:49:15,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000017_0
-2017-02-18 09:49:15,854 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000018_0
-2017-02-18 09:49:15,860 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a08ec1c
-2017-02-18 09:49:15,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:15,882 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:15,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 40 len: 94 to MEMORY
-2017-02-18 09:49:15,906 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 40 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:15,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 40, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->40
-2017-02-18 09:49:15,913 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 31 len: 77 to MEMORY
-2017-02-18 09:49:15,923 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:15,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 40, usedMemory ->71
-2017-02-18 09:49:15,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 31 len: 80 to MEMORY
-2017-02-18 09:49:15,940 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:15,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 71, usedMemory ->102
-2017-02-18 09:49:15,947 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 84 bytes
-2017-02-18 09:49:15,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 102 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:15,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 137 bytes from disk
-2017-02-18 09:49:15,992 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:15,993 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:15,994 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 92 bytes
-2017-02-18 09:49:15,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,027 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000018_0 is done. And is in the process of committing
-2017-02-18 09:49:16,029 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000018_0 is allowed to commit now
-2017-02-18 09:49:16,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000018_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000018
-2017-02-18 09:49:16,041 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:16,043 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000018_0' done.
-2017-02-18 09:49:16,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000018_0
-2017-02-18 09:49:16,045 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000019_0
-2017-02-18 09:49:16,051 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:16,052 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:16,053 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cac98e5
-2017-02-18 09:49:16,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:16,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:16,082 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:16,097 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:16,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:16,104 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:16,114 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:16,124 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:16,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:16,134 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:16,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:16,140 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:16,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,142 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:16,143 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:16,145 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:16,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:16,183 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:16,185 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:16,186 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:16,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:16,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,221 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000019_0 is done. And is in the process of committing
-2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000019_0 is allowed to commit now
-2017-02-18 09:49:16,223 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000019_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000019
-2017-02-18 09:49:16,227 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000019_0' done.
-2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000019_0
-2017-02-18 09:49:16,231 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000020_0
-2017-02-18 09:49:16,244 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@29a3caf2
-2017-02-18 09:49:16,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:16,267 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:16,273 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 74 to MEMORY
-2017-02-18 09:49:16,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:16,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
-2017-02-18 09:49:16,296 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:16,300 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:16,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->34
-2017-02-18 09:49:16,314 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:16,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:16,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->36
-2017-02-18 09:49:16,334 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 21 bytes
-2017-02-18 09:49:16,361 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 36 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:16,365 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
-2017-02-18 09:49:16,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 96 bytes from disk
-2017-02-18 09:49:16,375 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:16,377 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:16,379 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
-2017-02-18 09:49:16,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,407 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000020_0 is done. And is in the process of committing
-2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000020_0 is allowed to commit now
-2017-02-18 09:49:16,410 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000020_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000020
-2017-02-18 09:49:16,413 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:16,415 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000020_0' done.
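Editor's note: the hunk is cycling through one reduce attempt per output partition (r_000014 up to r_000028 in this excerpt), and most map outputs they fetch are the 2-byte empty-segment marker (decomp: 2), i.e. partitions that evidently received no keys. That pattern typically means the configured reducer count exceeds the number of distinct keys. A hypothetical driver fragment (class name and reducer count are illustrative, not taken from this repository) setting that count:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class DriverSketch {
        public static void main(String[] args) throws Exception {
            // One reduce task per output partition (part-r-000NN); with ~30
            // reducers and only a few distinct keys, most partitions stay empty.
            Job job = Job.getInstance(new Configuration(), "assign1-q1-sketch");
            job.setNumReduceTasks(30);
        }
    }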
-2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000020_0 -2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000021_0 -2017-02-18 09:49:16,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@610f3309 -2017-02-18 09:49:16,441 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:16,456 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:16,465 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,482 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:16,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 -2017-02-18 09:49:16,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 -2017-02-18 09:49:16,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,547 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 -2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:16,550 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:16,558 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:16,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk -2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:16,597 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:16,599 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:16,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:16,634 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000021_0 is done. And is in the process of committing -2017-02-18 09:49:16,635 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:16,636 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000021_0 is allowed to commit now -2017-02-18 09:49:16,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000021_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000021 -2017-02-18 09:49:16,660 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000021_0' done. 
-2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000021_0 -2017-02-18 09:49:16,665 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000022_0 -2017-02-18 09:49:16,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:16,678 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:16,685 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1906c58a -2017-02-18 09:49:16,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:16,705 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:16,722 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 79 to MEMORY -2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32 -2017-02-18 09:49:16,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,761 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:16,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->34 -2017-02-18 09:49:16,786 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 57 to MEMORY -2017-02-18 09:49:16,788 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:16,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->46 -2017-02-18 09:49:16,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 33 bytes -2017-02-18 09:49:16,834 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 46 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:16,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 99 bytes from disk -2017-02-18 09:49:16,843 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:16,847 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:16,849 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes -2017-02-18 09:49:16,852 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:16,880 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000022_0 is done. And is in the process of committing -2017-02-18 09:49:16,883 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:16,890 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000022_0 is allowed to commit now -2017-02-18 09:49:16,894 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000022_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000022 -2017-02-18 09:49:16,899 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:16,902 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000022_0' done. 
-2017-02-18 09:49:16,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000022_0 -2017-02-18 09:49:16,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000023_0 -2017-02-18 09:49:16,914 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@40151a53 -2017-02-18 09:49:16,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:16,939 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:16,945 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 70 to MEMORY -2017-02-18 09:49:16,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:16,954 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23 -2017-02-18 09:49:16,974 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,982 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:16,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25 -2017-02-18 09:49:16,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:16,991 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:16,996 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27 -2017-02-18 09:49:16,998 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes -2017-02-18 09:49:17,045 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:17,047 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk -2017-02-18 09:49:17,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:17,051 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:17,053 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes -2017-02-18 09:49:17,060 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,090 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000023_0 is done. And is in the process of committing -2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000023_0 is allowed to commit now -2017-02-18 09:49:17,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000023_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000023 -2017-02-18 09:49:17,097 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000023_0' done. 
-2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000023_0 -2017-02-18 09:49:17,101 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000024_0 -2017-02-18 09:49:17,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@9fc1ec5 -2017-02-18 09:49:17,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:17,137 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:17,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,149 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:17,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 -2017-02-18 09:49:17,159 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,170 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:17,174 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 -2017-02-18 09:49:17,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,188 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:17,211 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 -2017-02-18 09:49:17,212 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:17,215 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:17,216 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:17,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:17,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk -2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:17,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:17,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,299 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000024_0 is done. And is in the process of committing -2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000024_0 is allowed to commit now -2017-02-18 09:49:17,302 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000024_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000024 -2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000024_0' done. 
-2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000024_0 -2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000025_0 -2017-02-18 09:49:17,322 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:17,323 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:17,324 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2793d5d7 -2017-02-18 09:49:17,333 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:17,344 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:17,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,363 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:17,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 -2017-02-18 09:49:17,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,374 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% -2017-02-18 09:49:17,377 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:17,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 -2017-02-18 09:49:17,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,395 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:17,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 -2017-02-18 09:49:17,401 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:17,403 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:17,406 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:17,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:17,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk -2017-02-18 09:49:17,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:17,449 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:17,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes -2017-02-18 09:49:17,473 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,498 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000025_0 is done. And is in the process of committing -2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000025_0 is allowed to commit now -2017-02-18 09:49:17,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000025_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000025 -2017-02-18 09:49:17,520 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000025_0' done. 
-2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000025_0 -2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000026_0 -2017-02-18 09:49:17,535 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:17,536 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:17,537 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5413556b -2017-02-18 09:49:17,545 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:17,560 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:17,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY -2017-02-18 09:49:17,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:17,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11 -2017-02-18 09:49:17,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY -2017-02-18 09:49:17,586 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:17,589 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22 -2017-02-18 09:49:17,592 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY -2017-02-18 09:49:17,597 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:17,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33 -2017-02-18 09:49:17,610 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes -2017-02-18 09:49:17,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:17,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk -2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:17,653 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes -2017-02-18 09:49:17,657 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,693 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000026_0 is done. And is in the process of committing -2017-02-18 09:49:17,696 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:17,709 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000026_0 is allowed to commit now -2017-02-18 09:49:17,710 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000026_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000026 -2017-02-18 09:49:17,711 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:17,719 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000026_0' done. 
-2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000026_0 -2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000027_0 -2017-02-18 09:49:17,729 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 -2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] -2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cee97a9 -2017-02-18 09:49:17,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 -2017-02-18 09:49:17,765 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events -2017-02-18 09:49:17,771 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 60 to MEMORY -2017-02-18 09:49:17,781 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0 -2017-02-18 09:49:17,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14 -2017-02-18 09:49:17,788 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 -2017-02-18 09:49:17,808 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16 -2017-02-18 09:49:17,810 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY -2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 -2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18 -2017-02-18 09:49:17,828 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning -2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
-2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:17,830 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:17,838 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:17,859 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:17,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
-2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:17,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:17,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:17,941 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000027_0 is done. And is in the process of committing
-2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000027_0 is allowed to commit now
-2017-02-18 09:49:17,943 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000027_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000027
-2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000027_0' done.
-2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000027_0
-2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000028_0
-2017-02-18 09:49:17,960 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@760bfc86
-2017-02-18 09:49:17,973 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:17,990 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:17,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 19 len: 64 to MEMORY
-2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->19
-2017-02-18 09:49:18,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 10 len: 54 to MEMORY
-2017-02-18 09:49:18,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:18,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 2, commitMemory -> 19, usedMemory ->29
-2017-02-18 09:49:18,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 10 len: 55 to MEMORY
-2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 3, commitMemory -> 29, usedMemory ->39
-2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:18,075 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:18,076 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
-2017-02-18 09:49:18,125 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 39 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 90 bytes from disk
-2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:18,133 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:18,175 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 31 bytes
-2017-02-18 09:49:18,176 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,204 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000028_0 is done. And is in the process of committing
-2017-02-18 09:49:18,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,218 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000028_0 is allowed to commit now
-2017-02-18 09:49:18,219 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000028_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000028
-2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000028_0' done.
-2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000028_0
-2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000029_0
-2017-02-18 09:49:18,230 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:18,235 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:18,246 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6cf7a61b
-2017-02-18 09:49:18,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:18,283 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:18,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:18,292 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:18,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:18,312 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 13 len: 58 to MEMORY
-2017-02-18 09:49:18,315 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:18,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->15
-2017-02-18 09:49:18,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:18,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:18,329 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
-2017-02-18 09:49:18,331 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:18,335 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:18,344 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:18,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:18,380 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 58%
-2017-02-18 09:49:18,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk
-2017-02-18 09:49:18,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:18,387 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:18,388 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:18,394 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,432 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000029_0 is done. And is in the process of committing
-2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000029_0 is allowed to commit now
-2017-02-18 09:49:18,435 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000029_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000029
-2017-02-18 09:49:18,438 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:18,443 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000029_0' done.
-2017-02-18 09:49:18,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000029_0
-2017-02-18 09:49:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000030_0
-2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:18,456 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f37d9b5
-2017-02-18 09:49:18,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:18,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:18,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY
-2017-02-18 09:49:18,495 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:18,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
-2017-02-18 09:49:18,502 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 60 to MEMORY
-2017-02-18 09:49:18,508 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:18,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->34
-2017-02-18 09:49:18,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 70 to MEMORY
-2017-02-18 09:49:18,519 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:18,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->56
-2017-02-18 09:49:18,524 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
-2017-02-18 09:49:18,574 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 56 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:18,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
-2017-02-18 09:49:18,580 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:18,581 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:18,582 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 46 bytes
-2017-02-18 09:49:18,586 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,619 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000030_0 is done. And is in the process of committing
-2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000030_0 is allowed to commit now
-2017-02-18 09:49:18,623 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000030_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000030
-2017-02-18 09:49:18,626 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000030_0' done.
-2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000030_0
-2017-02-18 09:49:18,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000031_0
-2017-02-18 09:49:18,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:18,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:18,644 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d2ab9ab
-2017-02-18 09:49:18,651 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:18,665 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:18,672 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 43 len: 94 to MEMORY
-2017-02-18 09:49:18,678 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 43 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:18,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 43, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->43
-2017-02-18 09:49:18,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:18,690 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:18,701 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 43, usedMemory ->54
-2017-02-18 09:49:18,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 21 len: 70 to MEMORY
-2017-02-18 09:49:18,712 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 21 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:18,716 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 21, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->75
-2017-02-18 09:49:18,718 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:18,720 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:18,721 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 59 bytes
-2017-02-18 09:49:18,765 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 75 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:18,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 124 bytes from disk
-2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:18,771 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 65 bytes
-2017-02-18 09:49:18,775 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,811 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000031_0 is done. And is in the process of committing
-2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000031_0 is allowed to commit now
-2017-02-18 09:49:18,817 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000031_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000031
-2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000031_0' done.
-2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000031_0
-2017-02-18 09:49:18,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000032_0
-2017-02-18 09:49:18,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:18,842 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:18,843 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3fb90b1b
-2017-02-18 09:49:18,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:18,869 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:18,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 66 to MEMORY
-2017-02-18 09:49:18,882 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:18,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
-2017-02-18 09:49:18,898 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:18,901 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:18,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
-2017-02-18 09:49:18,915 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:18,922 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:18,929 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
-2017-02-18 09:49:18,932 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
-2017-02-18 09:49:18,981 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:18,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk
-2017-02-18 09:49:18,987 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:18,988 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:18,989 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
-2017-02-18 09:49:18,993 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,028 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000032_0 is done. And is in the process of committing
-2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000032_0 is allowed to commit now
-2017-02-18 09:49:19,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000032_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000032
-2017-02-18 09:49:19,036 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000032_0' done.
-2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000032_0
-2017-02-18 09:49:19,039 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000033_0
-2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,047 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@44aa3f1a
-2017-02-18 09:49:19,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:19,069 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:19,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:19,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:19,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
-2017-02-18 09:49:19,087 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:19,094 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:19,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
-2017-02-18 09:49:19,099 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:19,115 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:19,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
-2017-02-18 09:49:19,125 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
-2017-02-18 09:49:19,155 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:19,158 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 82 bytes from disk
-2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:19,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
-2017-02-18 09:49:19,165 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000033_0 is done. And is in the process of committing
-2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000033_0 is allowed to commit now
-2017-02-18 09:49:19,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000033_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000033
-2017-02-18 09:49:19,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,218 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000033_0' done.
-2017-02-18 09:49:19,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000033_0
-2017-02-18 09:49:19,221 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000034_0
-2017-02-18 09:49:19,226 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4b9eaa86
-2017-02-18 09:49:19,233 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:19,248 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:19,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY
-2017-02-18 09:49:19,261 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:19,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
-2017-02-18 09:49:19,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:19,272 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:19,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->13
-2017-02-18 09:49:19,278 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
-2017-02-18 09:49:19,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:19,287 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 13, usedMemory ->24
-2017-02-18 09:49:19,288 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:19,290 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:19,291 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 12 bytes
-2017-02-18 09:49:19,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 24 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:19,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 81 bytes from disk
-2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:19,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes
-2017-02-18 09:49:19,344 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,377 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000034_0 is done. And is in the process of committing
-2017-02-18 09:49:19,378 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000034_0 is allowed to commit now
-2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000034_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000034
-2017-02-18 09:49:19,384 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 68%
-2017-02-18 09:49:19,386 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,388 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000034_0' done.
-2017-02-18 09:49:19,389 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000034_0
-2017-02-18 09:49:19,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000035_0
-2017-02-18 09:49:19,400 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,401 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,403 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@23bd25db
-2017-02-18 09:49:19,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:19,422 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:19,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 72 to MEMORY
-2017-02-18 09:49:19,449 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:19,456 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
-2017-02-18 09:49:19,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:19,468 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:19,473 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
-2017-02-18 09:49:19,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:19,487 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:19,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
-2017-02-18 09:49:19,491 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
-2017-02-18 09:49:19,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:19,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk
-2017-02-18 09:49:19,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:19,545 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:19,547 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
-2017-02-18 09:49:19,550 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,579 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000035_0 is done. And is in the process of committing
-2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000035_0 is allowed to commit now
-2017-02-18 09:49:19,593 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000035_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000035
-2017-02-18 09:49:19,598 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000035_0' done.
-2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000035_0
-2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000036_0
-2017-02-18 09:49:19,613 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,614 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,616 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@f70f500
-2017-02-18 09:49:19,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:19,636 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:19,643 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 31 len: 83 to MEMORY
-2017-02-18 09:49:19,653 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:19,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->31
-2017-02-18 09:49:19,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 19 len: 65 to MEMORY
-2017-02-18 09:49:19,667 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:19,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 2, commitMemory -> 31, usedMemory ->50
-2017-02-18 09:49:19,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 19 len: 67 to MEMORY
-2017-02-18 09:49:19,680 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:19,684 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 3, commitMemory -> 50, usedMemory ->69
-2017-02-18 09:49:19,685 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:19,687 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:19,690 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 54 bytes
-2017-02-18 09:49:19,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 69 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk
-2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:19,727 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 60 bytes
-2017-02-18 09:49:19,729 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,767 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000036_0 is done. And is in the process of committing
-2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000036_0 is allowed to commit now
-2017-02-18 09:49:19,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000036_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000036
-2017-02-18 09:49:19,773 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,775 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000036_0' done.
-2017-02-18 09:49:19,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000036_0
-2017-02-18 09:49:19,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000037_0
-2017-02-18 09:49:19,783 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,785 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,786 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4624d4d1
-2017-02-18 09:49:19,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:19,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:19,825 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY
-2017-02-18 09:49:19,830 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:19,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
-2017-02-18 09:49:19,840 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 55 to MEMORY
-2017-02-18 09:49:19,848 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:19,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->33
-2017-02-18 09:49:19,859 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:19,865 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:19,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 33, usedMemory ->35
-2017-02-18 09:49:19,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 23 bytes
-2017-02-18 09:49:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 35 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:19,901 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk
-2017-02-18 09:49:19,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:19,903 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:19,919 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
-2017-02-18 09:49:19,920 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,958 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000037_0 is done. And is in the process of committing
-2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000037_0 is allowed to commit now
-2017-02-18 09:49:19,961 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000037_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000037
-2017-02-18 09:49:19,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:19,972 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000037_0' done.
-2017-02-18 09:49:19,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000037_0
-2017-02-18 09:49:19,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000038_0
-2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:19,985 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d2b79ba
-2017-02-18 09:49:19,993 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:20,012 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:20,020 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 70 to MEMORY
-2017-02-18 09:49:20,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:20,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
-2017-02-18 09:49:20,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,038 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:20,042 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24
-2017-02-18 09:49:20,051 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:20,062 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->26
-2017-02-18 09:49:20,064 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:20,066 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:20,067 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
-2017-02-18 09:49:20,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 26 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:20,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk
-2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:20,098 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes
-2017-02-18 09:49:20,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,145 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000038_0 is done. And is in the process of committing
-2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000038_0 is allowed to commit now
-2017-02-18 09:49:20,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000038_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000038
-2017-02-18 09:49:20,151 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000038_0' done.
-2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000038_0
-2017-02-18 09:49:20,154 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000039_0
-2017-02-18 09:49:20,162 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:20,163 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:20,164 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53944f2c
-2017-02-18 09:49:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:20,192 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:20,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 59 to MEMORY
-2017-02-18 09:49:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:20,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
-2017-02-18 09:49:20,220 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,230 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:20,232 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
-2017-02-18 09:49:20,244 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
-2017-02-18 09:49:20,254 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs -2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments -2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes -2017-02-18 09:49:20,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit -2017-02-18 09:49:20,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 73 bytes from disk -2017-02-18 09:49:20,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce -2017-02-18 09:49:20,298 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments -2017-02-18 09:49:20,301 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes -2017-02-18 09:49:20,304 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:20,337 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000039_0 is done. And is in the process of committing -2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. -2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000039_0 is allowed to commit now -2017-02-18 09:49:20,340 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000039_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000039 -2017-02-18 09:49:20,345 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce -2017-02-18 09:49:20,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000039_0' done. 
-2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000039_0
-2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000040_0
-2017-02-18 09:49:20,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:20,362 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:20,369 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cafad79
-2017-02-18 09:49:20,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:20,392 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:20,394 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
-2017-02-18 09:49:20,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,406 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:20,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:20,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:20,429 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,445 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:20,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:20,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:20,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:20,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:20,520 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,570 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000040_0 is done. And is in the process of committing
-2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000040_0 is allowed to commit now
-2017-02-18 09:49:20,572 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000040_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000040
-2017-02-18 09:49:20,573 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:20,574 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000040_0' done.
-2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000040_0
-2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000041_0
-2017-02-18 09:49:20,594 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@61da1de
-2017-02-18 09:49:20,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:20,635 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:20,637 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 89 to MEMORY
-2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44
-2017-02-18 09:49:20,647 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 69 to MEMORY
-2017-02-18 09:49:20,664 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:20,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->66
-2017-02-18 09:49:20,666 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:20,687 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:20,698 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 66, usedMemory ->68
-2017-02-18 09:49:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 54 bytes
-2017-02-18 09:49:20,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:20,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 111 bytes from disk
-2017-02-18 09:49:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:20,736 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:20,737 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 57 bytes
-2017-02-18 09:49:20,740 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000041_0 is done. And is in the process of committing
-2017-02-18 09:49:20,777 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000041_0 is allowed to commit now
-2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000041_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000041
-2017-02-18 09:49:20,779 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:20,781 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000041_0' done.
-2017-02-18 09:49:20,787 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000041_0
-2017-02-18 09:49:20,788 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000042_0
-2017-02-18 09:49:20,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:20,796 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:20,798 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@207bfdc3
-2017-02-18 09:49:20,804 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:20,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 34 len: 81 to MEMORY
-2017-02-18 09:49:20,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 34 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:20,846 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 34, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->34
-2017-02-18 09:49:20,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 58 to MEMORY
-2017-02-18 09:49:20,856 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:20,863 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 34, usedMemory ->46
-2017-02-18 09:49:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 69 to MEMORY
-2017-02-18 09:49:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 46, usedMemory ->68
-2017-02-18 09:49:20,879 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 50 bytes
-2017-02-18 09:49:20,893 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:20,906 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk
-2017-02-18 09:49:20,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:20,908 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:20,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 58 bytes
-2017-02-18 09:49:20,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,962 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000042_0 is done. And is in the process of committing
-2017-02-18 09:49:20,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000042_0 is allowed to commit now
-2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000042_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000042
-2017-02-18 09:49:20,965 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:20,968 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000042_0' done.
-2017-02-18 09:49:20,972 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000042_0
-2017-02-18 09:49:20,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000043_0
-2017-02-18 09:49:20,980 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:20,981 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:20,984 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1681a6f6
-2017-02-18 09:49:20,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:21,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:21,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 71 to MEMORY
-2017-02-18 09:49:21,024 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:21,029 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
-2017-02-18 09:49:21,036 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,045 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:21,048 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
-2017-02-18 09:49:21,050 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,055 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:21,061 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
-2017-02-18 09:49:21,062 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:21,063 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,064 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
-2017-02-18 09:49:21,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:21,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
-2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:21,105 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
-2017-02-18 09:49:21,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,147 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000043_0 is done. And is in the process of committing
-2017-02-18 09:49:21,148 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000043_0 is allowed to commit now
-2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000043_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000043
-2017-02-18 09:49:21,150 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000043_0' done.
-2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000043_0
-2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000044_0
-2017-02-18 09:49:21,175 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:21,176 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:21,177 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3306c9fa
-2017-02-18 09:49:21,182 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:21,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:21,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 24 len: 73 to MEMORY
-2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->24
-2017-02-18 09:49:21,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 24 len: 71 to MEMORY
-2017-02-18 09:49:21,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:21,254 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 2, commitMemory -> 24, usedMemory ->48
-2017-02-18 09:49:21,255 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 60 to MEMORY
-2017-02-18 09:49:21,257 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:21,268 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 48, usedMemory ->61
-2017-02-18 09:49:21,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:21,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:21,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 40 bytes
-2017-02-18 09:49:21,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 61 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:21,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 105 bytes from disk
-2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:21,317 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 50 bytes
-2017-02-18 09:49:21,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,353 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000044_0 is done. And is in the process of committing
-2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000044_0 is allowed to commit now
-2017-02-18 09:49:21,365 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000044_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000044
-2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000044_0' done.
-2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000044_0
-2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000045_0
-2017-02-18 09:49:21,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:21,386 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:21,388 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d01f7c1
-2017-02-18 09:49:21,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:21,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:21,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:21,451 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,454 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:21,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:21,471 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,477 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:21,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:21,482 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:21,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:21,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:21,523 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:21,526 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:21,528 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:21,543 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,578 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000045_0 is done. And is in the process of committing
-2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000045_0 is allowed to commit now
-2017-02-18 09:49:21,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000045_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000045
-2017-02-18 09:49:21,581 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:21,583 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000045_0' done.
-2017-02-18 09:49:21,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000045_0
-2017-02-18 09:49:21,585 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000046_0
-2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6a45aa68
-2017-02-18 09:49:21,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:21,617 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:21,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 60 to MEMORY
-2017-02-18 09:49:21,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:21,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13
-2017-02-18 09:49:21,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,661 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:21,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15
-2017-02-18 09:49:21,667 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,673 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
-2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:21,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
-2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:21,740 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:21,742 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,768 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000046_0 is done. And is in the process of committing
-2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000046_0 is allowed to commit now
-2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000046_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000046
-2017-02-18 09:49:21,774 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000046_0' done.
-2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000046_0
-2017-02-18 09:49:21,781 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000047_0
-2017-02-18 09:49:21,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:21,796 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:21,797 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2c3c5ba6
-2017-02-18 09:49:21,801 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:21,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:21,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 62 to MEMORY
-2017-02-18 09:49:21,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:21,841 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14
-2017-02-18 09:49:21,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,852 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:21,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16
-2017-02-18 09:49:21,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:21,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:21,864 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18
-2017-02-18 09:49:21,866 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:21,898 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:21,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 76 bytes from disk
-2017-02-18 09:49:21,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:21,914 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:21,915 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:21,918 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,946 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000047_0 is done. And is in the process of committing
-2017-02-18 09:49:21,958 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:21,967 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000047_0 is allowed to commit now
-2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000047_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000047
-2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:21,970 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000047_0' done.
-2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000047_0
-2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000048_0
-2017-02-18 09:49:21,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1ce764b2
-2017-02-18 09:49:21,991 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:22,015 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:22,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:22,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:22,034 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
-2017-02-18 09:49:22,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:22,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:22,051 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
-2017-02-18 09:49:22,067 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:22,068 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
-2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
-2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:22,142 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
-2017-02-18 09:49:22,155 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,184 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000048_0 is done. And is in the process of committing
-2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000048_0 is allowed to commit now
-2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000048_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000048
-2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000048_0' done.
-2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000048_0
-2017-02-18 09:49:22,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000049_0
-2017-02-18 09:49:22,193 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
-2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
-2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@518f2876
-2017-02-18 09:49:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
-2017-02-18 09:49:22,230 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
-2017-02-18 09:49:22,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 57 to MEMORY
-2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
-2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
-2017-02-18 09:49:22,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
-2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
-2017-02-18 09:49:22,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
-2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
-2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
-2017-02-18 09:49:22,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
-2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
-2017-02-18 09:49:22,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
-2017-02-18 09:49:22,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:22,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
-2017-02-18 09:49:22,300 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 71 bytes from disk
-2017-02-18 09:49:22,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
-2017-02-18 09:49:22,302 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
-2017-02-18 09:49:22,303 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
-2017-02-18 09:49:22,314 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,356 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000049_0 is done. And is in the process of committing
-2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
-2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000049_0 is allowed to commit now
-2017-02-18 09:49:22,359 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000049_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000049
-2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
-2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000049_0' done.
-2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000049_0
-2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
-2017-02-18 09:49:22,639 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1938545376_0001
+ Bytes Written=2051650
+2017-02-19 04:44:24,115 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-19 04:44:26,229 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-19 04:44:26,254 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-19 04:44:27,594 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-19 04:44:27,671 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-19 04:44:28,071 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-19 04:44:29,250 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local937133229_0001 +2017-02-19 04:44:30,550 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-19 04:44:30,551 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local937133229_0001 +2017-02-19 04:44:30,573 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-19 04:44:30,615 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-19 04:44:30,621 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-19 04:44:30,931 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-19 04:44:30,939 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local937133229_0001_m_000000_0 +2017-02-19 04:44:31,136 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-19 04:44:31,241 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-19 04:44:31,247 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-19 04:44:31,687 INFO org.apache.hadoop.mapreduce.Job: Job job_local937133229_0001 running in uber mode : false +2017-02-19 04:44:31,697 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-19 04:44:32,062 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-19 04:44:32,063 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-19 04:44:32,063 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-19 04:44:32,064 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-19 04:44:32,065 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-19 04:44:32,078 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-19 04:44:32,102 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-19 04:44:37,228 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:37,724 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0% +2017-02-19 04:44:40,240 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:40,743 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0% +2017-02-19 04:44:43,245 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:43,755 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0% +2017-02-19 04:44:46,252 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:46,768 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0% +2017-02-19 04:44:49,255 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:49,783 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0% +2017-02-19 04:44:52,265 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:52,817 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0% +2017-02-19 04:44:55,276 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:55,828 INFO 
org.apache.hadoop.mapreduce.Job: map 17% reduce 0% +2017-02-19 04:44:58,277 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:44:58,846 INFO org.apache.hadoop.mapreduce.Job: map 20% reduce 0% +2017-02-19 04:45:00,846 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:00,848 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-19 04:45:00,849 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-19 04:45:00,849 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 5624480; bufvoid = 104857600 +2017-02-19 04:45:00,849 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25005312(100021248); length = 1209085/6553600 +2017-02-19 04:45:01,286 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-19 04:45:01,853 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-19 04:45:04,131 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-19 04:45:04,159 INFO org.apache.hadoop.mapred.Task: Task:attempt_local937133229_0001_m_000000_0 is done. And is in the process of committing +2017-02-19 04:45:04,165 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-19 04:45:04,168 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local937133229_0001_m_000000_0' done. +2017-02-19 04:45:04,169 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local937133229_0001_m_000000_0 +2017-02-19 04:45:04,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local937133229_0001_m_000001_0 +2017-02-19 04:45:04,177 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-19 04:45:04,178 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-19 04:45:04,180 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-19 04:45:04,474 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-19 04:45:04,476 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-19 04:45:04,476 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-19 04:45:04,476 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-19 04:45:04,477 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-19 04:45:04,484 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-19 04:45:04,491 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-19 04:45:04,865 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-19 04:45:10,187 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:10,893 INFO org.apache.hadoop.mapreduce.Job: map 47% reduce 0% +2017-02-19 04:45:13,189 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:13,639 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:13,640 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-19 04:45:13,641 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-19 04:45:13,641 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 2143719; bufvoid = 104857600 +2017-02-19 04:45:13,642 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25715252(102861008); length = 499145/6553600 +2017-02-19 
04:45:13,920 INFO org.apache.hadoop.mapreduce.Job: map 54% reduce 0% +2017-02-19 04:45:14,652 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-19 04:45:14,658 INFO org.apache.hadoop.mapred.Task: Task:attempt_local937133229_0001_m_000001_0 is done. And is in the process of committing +2017-02-19 04:45:14,665 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-19 04:45:14,669 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local937133229_0001_m_000001_0' done. +2017-02-19 04:45:14,670 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local937133229_0001_m_000001_0 +2017-02-19 04:45:14,670 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local937133229_0001_m_000002_0 +2017-02-19 04:45:14,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-19 04:45:14,678 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-19 04:45:14,680 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-19 04:45:14,928 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-19 04:45:14,970 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-19 04:45:14,979 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-19 04:45:14,980 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-19 04:45:14,986 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-19 04:45:14,987 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-19 04:45:14,993 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-19 04:45:14,994 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-19 04:45:20,687 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:20,949 INFO org.apache.hadoop.mapreduce.Job: map 88% reduce 0% +2017-02-19 04:45:21,079 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-19 04:45:21,083 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-19 04:45:21,083 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-19 04:45:21,084 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 1596794; bufvoid = 104857600 +2017-02-19 04:45:21,084 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 25892496(103569984); length = 321901/6553600 +2017-02-19 04:45:21,616 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-19 04:45:21,640 INFO org.apache.hadoop.mapred.Task: Task:attempt_local937133229_0001_m_000002_0 is done. And is in the process of committing +2017-02-19 04:45:21,647 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-19 04:45:21,650 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local937133229_0001_m_000002_0' done. +2017-02-19 04:45:21,650 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local937133229_0001_m_000002_0 +2017-02-19 04:45:21,651 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
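The three map tasks above line up one-to-one with the three input files (pg3200.txt, pg100.txt, pg31100.txt): with the default TextInputFormat, each file smaller than the split size becomes exactly one split, hence "Total input paths to process : 3" and "number of splits:3". A minimal driver sketch of that wiring, assuming hypothetical class and path names rather than the repo's actual code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MultiInputDriver {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "Q2");
    job.setJarByClass(MultiInputDriver.class);
    // One split, and therefore one map task, per small input file:
    FileInputFormat.setInputPaths(job,
        new Path("Assign1/pg3200.txt"),
        new Path("Assign1/pg100.txt"),
        new Path("Assign1/pg31100.txt"));
    FileOutputFormat.setOutputPath(job, new Path("Assign1/output_Q2"));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}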
+2017-02-19 04:45:21,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-19 04:45:21,658 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local937133229_0001_r_000000_0 +2017-02-19 04:45:21,696 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-19 04:45:21,697 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-19 04:45:21,712 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@38981d47 +2017-02-19 04:45:21,822 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-19 04:45:21,872 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local937133229_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-19 04:45:21,958 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-19 04:45:22,058 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local937133229_0001_m_000001_0 decomp: 2393295 len: 2393299 to MEMORY +2017-02-19 04:45:22,107 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2393295 bytes from map-output for attempt_local937133229_0001_m_000001_0 +2017-02-19 04:45:22,130 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2393295, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2393295 +2017-02-19 04:45:22,154 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local937133229_0001_m_000002_0 decomp: 1757748 len: 1757752 to MEMORY +2017-02-19 04:45:22,166 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1757748 bytes from map-output for attempt_local937133229_0001_m_000002_0 +2017-02-19 04:45:22,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1757748, inMemoryMapOutputs.size() -> 2, commitMemory -> 2393295, usedMemory ->4151043 +2017-02-19 04:45:22,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local937133229_0001_m_000000_0 decomp: 6229026 len: 6229030 to MEMORY +2017-02-19 04:45:22,333 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 6229026 bytes from map-output for attempt_local937133229_0001_m_000000_0 +2017-02-19 04:45:22,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 6229026, inMemoryMapOutputs.size() -> 3, commitMemory -> 4151043, usedMemory ->10380069 +2017-02-19 04:45:22,346 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-19 04:45:22,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
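The MergerManager arithmetic above is internally consistent: maxSingleShuffleLimit is exactly memoryLimit / 4 (679778688 / 4 = 169944672), and the three fetched map outputs sum to the reported usedMemory (2393295 + 1757748 = 4151043, plus 6229026 gives 10380069, the figure later merged to disk). A tiny standalone check of that bookkeeping, using only numbers taken from the log:

public class ShuffleMath {
  public static void main(String[] args) {
    long memoryLimit = 679_778_688L;                 // MergerManager: memoryLimit
    System.out.println(memoryLimit / 4);             // 169944672 = maxSingleShuffleLimit
    long[] fetched = {2_393_295L, 1_757_748L, 6_229_026L}; // decomp sizes of the 3 map outputs
    long used = 0;
    for (long b : fetched) used += b;
    System.out.println(used);                        // 10380069 = usedMemory after "3 / 3 copied."
  }
}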
+2017-02-19 04:45:22,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-19 04:45:22,371 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-19 04:45:22,381 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 10380058 bytes
+2017-02-19 04:45:25,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 10380069 bytes to disk to satisfy reduce memory limit
+2017-02-19 04:45:25,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 10380069 bytes from disk
+2017-02-19 04:45:25,074 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-19 04:45:25,074 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-19 04:45:25,075 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 10380062 bytes
+2017-02-19 04:45:25,075 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-19 04:45:25,158 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-19 04:45:27,728 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 04:45:27,985 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 68%
+2017-02-19 04:45:30,735 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 04:45:31,013 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 69%
+2017-02-19 04:45:33,737 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 04:45:34,024 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 85%
+2017-02-19 04:45:34,842 INFO org.apache.hadoop.mapred.Task: Task:attempt_local937133229_0001_r_000000_0 is done. And is in the process of committing
+2017-02-19 04:45:34,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 04:45:34,848 INFO org.apache.hadoop.mapred.Task: Task attempt_local937133229_0001_r_000000_0 is allowed to commit now
+2017-02-19 04:45:34,849 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local937133229_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2/_temporary/0/task_local937133229_0001_r_000000
+2017-02-19 04:45:34,858 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-19 04:45:34,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local937133229_0001_r_000000_0' done.
+2017-02-19 04:45:34,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local937133229_0001_r_000000_0
+2017-02-19 04:45:34,862 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-19 04:45:34,935 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local937133229_0001
 java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
 	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
 Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
@@ -10392,32 +671,33 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http
 	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
 	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
 	... 1 more
-2017-02-18 09:49:23,414 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 failed with state FAILED due to: NA
-2017-02-18 09:49:23,878 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+2017-02-19 04:45:35,032 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-19 04:45:35,032 INFO org.apache.hadoop.mapreduce.Job: Job job_local937133229_0001 failed with state FAILED due to: NA
+2017-02-19 04:45:35,147 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
 	File System Counters
-		FILE: Number of bytes read=1378254888
-		FILE: Number of bytes written=15479292
+		FILE: Number of bytes read=110497367
+		FILE: Number of bytes written=49147132
 		FILE: Number of read operations=0
 		FILE: Number of large read operations=0
 		FILE: Number of write operations=0
 	Map-Reduce Framework
 		Map input records=507535
-		Map output records=4678719
-		Map output bytes=43638689
-		Map output materialized bytes=8043
+		Map output records=507535
+		Map output bytes=9364993
+		Map output materialized bytes=10380081
 		Input split bytes=351
-		Combine input records=4678719
-		Combine output records=131
-		Reduce input groups=77
-		Reduce shuffle bytes=8043
-		Reduce input records=131
-		Reduce output records=77
-		Spilled Records=262
-		Shuffled Maps =150
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=87026
+		Reduce shuffle bytes=10380081
+		Reduce input records=507535
+		Reduce output records=87026
+		Spilled Records=1015070
+		Shuffled Maps =3
 		Failed Shuffles=0
-		Merged Map outputs=150
-		GC time elapsed (ms)=866
-		Total committed heap usage (bytes)=10455764992
+		Merged Map outputs=3
+		GC time elapsed (ms)=785
+		Total committed heap usage (bytes)=773603328
 	Shuffle Errors
 		BAD_ID=0
 		CONNECTION=0
@@ -10428,4 +708,4 @@ Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.Http
 	File Input Format Counters
 		Bytes Read=26057874
 	File Output Format Counters
-		Bytes Written=1290
+		Bytes Written=2051650
diff --git a/Assign1/hadoop.log.2017-02-18 b/Assign1/hadoop.log.2017-02-18
new file mode 100644
index 0000000000000000000000000000000000000000..e1598461a5f42e614dcd64efddb73413b1d8db7e
--- /dev/null
+++ b/Assign1/hadoop.log.2017-02-18
@@ -0,0 +1,10731 @@
+2017-02-18 02:36:22,445 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 02:36:24,389 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 02:36:24,424 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 02:37:48,329 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 02:37:49,998 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 02:37:50,000 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 02:37:51,700 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
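The counters hunk above is a before/after picture of running with and without a combiner: the removed run combined 4678719 map records down to 131 (8043 materialized bytes), while the Q2 run ships all 507535 records through the shuffle (10380081 bytes). If Q2's per-key aggregation is associative and commutative, the reducer could be reused as a combiner; a sketch under that assumption, with SumReducer as a hypothetical name rather than the repo's class:

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Safe to register via job.setCombinerClass(SumReducer.class) only because
// partial sums of partial sums equal the total sum.
public class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
  private final IntWritable result = new IntWritable();
  @Override
  protected void reduce(Text key, Iterable<IntWritable> values, Context context)
      throws IOException, InterruptedException {
    int sum = 0;
    for (IntWritable v : values) sum += v.get();
    result.set(sum);
    context.write(key, result);
  }
}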
+2017-02-18 02:37:51,741 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 +2017-02-18 02:37:52,081 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 +2017-02-18 02:37:53,299 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1560003292_0001 +2017-02-18 02:37:54,908 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 02:37:54,909 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1560003292_0001 +2017-02-18 02:37:54,923 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 02:37:54,983 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 02:37:54,991 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 02:37:55,317 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 02:37:55,321 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1560003292_0001_m_000000_0 +2017-02-18 02:37:55,573 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 02:37:55,691 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 02:37:55,717 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/Assign1/pg100.txt:0+5589886 +2017-02-18 02:37:56,108 INFO org.apache.hadoop.mapreduce.Job: Job job_local1560003292_0001 running in uber mode : false +2017-02-18 02:37:56,130 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 02:37:56,758 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 02:37:56,760 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 02:37:56,761 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 02:37:56,761 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 02:37:56,761 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 02:37:56,794 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 02:38:01,693 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 02:38:02,182 INFO org.apache.hadoop.mapreduce.Job: map 48% reduce 0% +2017-02-18 02:38:02,562 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 02:38:02,565 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 02:38:02,566 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 02:38:02,567 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 02:38:02,567 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 02:38:04,704 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 02:38:05,194 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 02:38:07,715 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 02:38:10,719 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 02:38:11,523 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 02:38:11,593 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1560003292_0001_m_000000_0 
is done. And is in the process of committing +2017-02-18 02:38:11,598 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 02:38:11,605 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1560003292_0001_m_000000_0' done. +2017-02-18 02:38:11,609 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1560003292_0001_m_000000_0 +2017-02-18 02:38:11,611 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 02:38:11,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 02:38:11,631 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1560003292_0001_r_000000_0 +2017-02-18 02:38:11,696 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 02:38:11,696 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 02:38:11,724 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7b948a59 +2017-02-18 02:38:11,858 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 02:38:11,897 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1560003292_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 02:38:12,182 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1560003292_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY +2017-02-18 02:38:12,220 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 02:38:12,391 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local1560003292_0001_m_000000_0 +2017-02-18 02:38:12,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 +2017-02-18 02:38:12,427 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 02:38:12,428 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 02:38:12,428 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 02:38:12,535 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 02:38:12,553 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes +2017-02-18 02:38:16,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit +2017-02-18 02:38:16,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk +2017-02-18 02:38:16,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 02:38:16,461 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 02:38:16,469 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes +2017-02-18 02:38:16,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 02:38:16,598 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 02:38:17,735 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 02:38:18,244 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 68%
+2017-02-18 02:38:20,745 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 02:38:21,257 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 88%
+2017-02-18 02:38:21,893 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1560003292_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 02:38:21,902 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 02:38:21,907 INFO org.apache.hadoop.mapred.Task: Task attempt_local1560003292_0001_r_000000_0 is allowed to commit now
+2017-02-18 02:38:21,909 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1560003292_0001_r_000000_0' to file:/home/cloudera/workspace/Assign1/output/_temporary/0/task_local1560003292_0001_r_000000
+2017-02-18 02:38:21,915 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 02:38:21,917 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1560003292_0001_r_000000_0' done.
+2017-02-18 02:38:21,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1560003292_0001_r_000000_0
+2017-02-18 02:38:21,919 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 02:38:21,971 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1560003292_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 02:38:22,259 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 02:38:22,260 INFO org.apache.hadoop.mapreduce.Job: Job job_local1560003292_0001 failed with state FAILED due to: NA
+2017-02-18 02:38:22,337 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=33617226
+		FILE: Number of bytes written=34935992
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023444
+		Map output bytes=9171648
+		Map output materialized bytes=11218542
+		Input split bytes=112
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=67780
+		Reduce shuffle bytes=11218542
+		Reduce input records=1023444
+		Reduce output records=67780
+		Spilled Records=2046888
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=338
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=5589886
+	File Output Format Counters
+		Bytes Written=726624
+2017-02-18 04:02:41,586 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 04:02:43,970 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 04:02:44,026 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 04:02:46,105 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
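Every run in this log fails the same way and at the same place: the reduce task commits its output successfully, and only then does LocalJobRunner's cleanup (LocalJobRunner.java:573) die on a missing Apache HttpComponents class, so the job is marked FAILED even though the part files were written. The likely cause is httpclient*.jar missing from the local runtime classpath; the recurring "No job jar file set" warning points at the same packaging gap and is addressed with Job#setJar(String) or Job#setJarByClass, as the log message itself suggests. A quick standalone probe that reproduces the lookup (a hypothetical helper, not part of the assignment):

public class ClasspathProbe {
  public static void main(String[] args) throws ClassNotFoundException {
    // Throws ClassNotFoundException, exactly as in the trace above,
    // when httpclient is absent from the classpath.
    Class<?> c = Class.forName("org.apache.http.client.methods.HttpUriRequest");
    System.out.println("resolved: " + c.getName());
  }
}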
+2017-02-18 04:02:46,164 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 +2017-02-18 04:02:46,530 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 +2017-02-18 04:02:47,436 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2033431332_0001 +2017-02-18 04:02:49,019 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 04:02:49,020 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2033431332_0001 +2017-02-18 04:02:49,030 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 04:02:49,072 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:02:49,099 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 04:02:49,385 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 04:02:49,388 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033431332_0001_m_000000_0 +2017-02-18 04:02:49,620 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:02:49,757 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 04:02:49,775 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589886 +2017-02-18 04:02:50,212 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033431332_0001 running in uber mode : false +2017-02-18 04:02:50,219 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 04:02:50,512 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 04:02:50,513 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 04:02:50,531 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 04:02:55,692 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 04:02:56,253 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 04:02:56,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 04:02:56,274 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 04:02:56,276 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 04:02:56,276 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 04:02:56,277 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 04:02:58,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:02:59,264 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 04:03:01,728 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:03:04,732 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:03:05,609 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 04:03:05,656 INFO org.apache.hadoop.mapred.Task: 
Task:attempt_local2033431332_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 04:03:05,670 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 04:03:05,676 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033431332_0001_m_000000_0' done. +2017-02-18 04:03:05,678 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033431332_0001_m_000000_0 +2017-02-18 04:03:05,679 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 04:03:05,699 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 04:03:05,700 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033431332_0001_r_000000_0 +2017-02-18 04:03:05,762 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:03:05,763 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 04:03:05,766 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7075f914 +2017-02-18 04:03:05,868 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 04:03:05,897 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033431332_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 04:03:06,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2033431332_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY +2017-02-18 04:03:06,226 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local2033431332_0001_m_000000_0 +2017-02-18 04:03:06,238 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 +2017-02-18 04:03:06,255 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 04:03:06,256 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 04:03:06,257 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 04:03:06,313 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 04:03:06,330 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 04:03:06,334 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes
+2017-02-18 04:03:09,673 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit
+2017-02-18 04:03:09,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk
+2017-02-18 04:03:09,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 04:03:09,676 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 04:03:09,677 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes
+2017-02-18 04:03:09,689 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 04:03:09,725 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 04:03:11,785 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:03:12,342 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 74%
+2017-02-18 04:03:14,375 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033431332_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 04:03:14,384 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:03:14,388 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033431332_0001_r_000000_0 is allowed to commit now
+2017-02-18 04:03:14,390 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033431332_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local2033431332_0001_r_000000
+2017-02-18 04:03:14,396 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:03:14,398 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033431332_0001_r_000000_0' done.
+2017-02-18 04:03:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033431332_0001_r_000000_0
+2017-02-18 04:03:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 04:03:14,461 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2033431332_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 04:03:15,360 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 04:03:15,361 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033431332_0001 failed with state FAILED due to: NA
+2017-02-18 04:03:15,411 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=33617234
+		FILE: Number of bytes written=34936048
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023444
+		Map output bytes=9171648
+		Map output materialized bytes=11218542
+		Input split bytes=116
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=67780
+		Reduce shuffle bytes=11218542
+		Reduce input records=1023444
+		Reduce output records=67780
+		Spilled Records=2046888
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=323
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=5589886
+	File Output Format Counters
+		Bytes Written=726624
+2017-02-18 04:04:46,638 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 04:04:48,239 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 04:04:48,274 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 04:04:49,758 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
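The spill accounting in these single-file runs can be verified straight from the MapTask lines: the sort buffer is 100 MB (bufvoid = 104857600 bytes) with a 0.80 spill threshold, so the soft limit is 104857600 * 8 / 10 = 83886080, and the 9171648-byte map output never reaches it, hence the single "Finished spill 0". Spilled Records=2046888 is then exactly twice Map output records (2 x 1023444): each record is written out once in the map-side spill and once more in the reduce-side merge to disk. A sketch of the two knobs behind those lines, using the standard Hadoop 2.x property keys with values mirroring the log:

import org.apache.hadoop.conf.Configuration;

public class SortBufferConfig {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setInt("mapreduce.task.io.sort.mb", 100);            // 100 MB buffer -> bufvoid = 104857600
    conf.setFloat("mapreduce.map.sort.spill.percent", 0.80f); // -> "soft limit at 83886080"
    System.out.println(104857600L * 8 / 10);                  // 83886080
  }
}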
+2017-02-18 04:04:49,787 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 +2017-02-18 04:04:50,101 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 +2017-02-18 04:04:50,998 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1076588983_0001 +2017-02-18 04:04:52,443 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 04:04:52,444 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1076588983_0001 +2017-02-18 04:04:52,456 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 04:04:52,491 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:04:52,520 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 04:04:52,807 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 04:04:52,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1076588983_0001_m_000000_0 +2017-02-18 04:04:52,978 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:04:53,055 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 04:04:53,060 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 04:04:53,448 INFO org.apache.hadoop.mapreduce.Job: Job job_local1076588983_0001 running in uber mode : false +2017-02-18 04:04:53,450 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 04:04:53,697 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 04:04:53,717 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 04:04:53,718 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 04:04:53,719 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 04:04:53,719 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 04:04:53,737 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 04:04:53,759 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 04:04:59,032 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 04:04:59,495 INFO org.apache.hadoop.mapreduce.Job: map 54% reduce 0% +2017-02-18 04:04:59,714 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 04:04:59,718 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 04:04:59,718 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 04:04:59,719 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 04:04:59,719 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 04:05:02,040 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:05:02,507 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 04:05:05,048 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:05:08,050 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 04:05:09,429 INFO org.apache.hadoop.mapred.MapTask: 
Finished spill 0 +2017-02-18 04:05:09,462 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1076588983_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 04:05:09,468 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 04:05:09,471 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1076588983_0001_m_000000_0' done. +2017-02-18 04:05:09,472 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1076588983_0001_m_000000_0 +2017-02-18 04:05:09,473 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 04:05:09,492 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 04:05:09,492 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1076588983_0001_r_000000_0 +2017-02-18 04:05:09,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 04:05:09,542 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 04:05:09,547 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@113db302 +2017-02-18 04:05:09,550 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 04:05:09,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 04:05:09,670 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1076588983_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 04:05:09,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1076588983_0001_m_000000_0 decomp: 11218538 len: 11218542 to MEMORY +2017-02-18 04:05:09,965 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11218538 bytes from map-output for attempt_local1076588983_0001_m_000000_0 +2017-02-18 04:05:09,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11218538, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11218538 +2017-02-18 04:05:10,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 04:05:10,005 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 04:05:10,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 04:05:10,244 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 04:05:10,251 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes
+2017-02-18 04:05:13,750 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 11218538 bytes to disk to satisfy reduce memory limit
+2017-02-18 04:05:13,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 11218542 bytes from disk
+2017-02-18 04:05:13,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 04:05:13,763 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 04:05:13,764 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 11218535 bytes
+2017-02-18 04:05:13,769 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 04:05:13,825 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 04:05:15,562 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:05:15,597 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 69%
+2017-02-18 04:05:18,569 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:05:18,608 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 98%
+2017-02-18 04:05:18,772 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1076588983_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 04:05:18,780 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:05:18,784 INFO org.apache.hadoop.mapred.Task: Task attempt_local1076588983_0001_r_000000_0 is allowed to commit now
+2017-02-18 04:05:18,785 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1076588983_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1076588983_0001_r_000000
+2017-02-18 04:05:18,790 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 04:05:18,792 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1076588983_0001_r_000000_0' done.
+2017-02-18 04:05:18,793 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1076588983_0001_r_000000_0
+2017-02-18 04:05:18,794 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 04:05:18,863 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1076588983_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 04:05:19,616 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 04:05:19,617 INFO org.apache.hadoop.mapreduce.Job: Job job_local1076588983_0001 failed with state FAILED due to: NA
+2017-02-18 04:05:19,660 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=33617240
+		FILE: Number of bytes written=34936048
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023444
+		Map output bytes=9171648
+		Map output materialized bytes=11218542
+		Input split bytes=116
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=67780
+		Reduce shuffle bytes=11218542
+		Reduce input records=1023444
+		Reduce output records=67780
+		Spilled Records=2046888
+		Shuffled Maps =1
+		Failed Shuffles=0
+		Merged Map outputs=1
+		GC time elapsed (ms)=272
+		Total committed heap usage (bytes)=331227136
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=5589889
+	File Output Format Counters
+		Bytes Written=726624
+2017-02-18 06:19:24,805 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:20:24,453 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:20:27,186 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 06:20:27,188 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 06:20:29,525 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
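One subtle change between the 04:02 and 04:04 runs: pg100.txt grew from 5589886 to 5589889 bytes, i.e. by exactly the three bytes of a UTF-8 byte-order mark, which is why LineRecordReader starts reporting "Found UTF-8 BOM and skipped it" and why File Input Format "Bytes Read" ticks up to 5589889 while the record and output counters are unchanged. A standalone check for such a BOM (a hypothetical utility, not part of the repo):

import java.io.FileInputStream;
import java.io.IOException;

public class BomCheck {
  public static void main(String[] args) throws IOException {
    try (FileInputStream in = new FileInputStream(args[0])) {
      // A UTF-8 BOM is the 3-byte sequence EF BB BF at offset 0.
      boolean bom = in.read() == 0xEF && in.read() == 0xBB && in.read() == 0xBF;
      System.out.println(bom ? "UTF-8 BOM present (3 bytes)" : "no BOM");
    }
  }
}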
+2017-02-18 06:20:29,602 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1 +2017-02-18 06:20:30,050 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1 +2017-02-18 06:20:31,621 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local224597268_0001 +2017-02-18 06:20:33,564 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 06:20:33,566 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local224597268_0001 +2017-02-18 06:20:33,595 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 06:20:33,663 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:20:33,685 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 06:20:34,258 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 06:20:34,261 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_m_000000_0 +2017-02-18 06:20:34,553 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:20:34,596 INFO org.apache.hadoop.mapreduce.Job: Job job_local224597268_0001 running in uber mode : false +2017-02-18 06:20:34,604 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 06:20:34,725 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:20:34,763 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 06:20:37,416 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 06:20:37,502 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 06:20:37,565 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 06:20:43,656 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 06:20:44,652 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 06:20:45,757 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 06:20:45,762 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 06:20:45,765 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 06:20:45,766 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 06:20:45,767 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 06:20:46,673 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 06:20:47,658 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 06:20:49,678 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 06:20:52,682 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 06:20:55,683 INFO 
org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 06:20:57,582 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 06:20:57,630 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 06:20:57,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 06:20:57,643 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_m_000000_0' done. +2017-02-18 06:20:57,645 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_m_000000_0 +2017-02-18 06:20:57,646 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 06:20:57,681 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 06:20:57,771 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 06:20:57,772 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000000_0 +2017-02-18 06:20:57,834 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:20:57,835 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:20:57,875 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@8a93430 +2017-02-18 06:20:58,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:20:58,054 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:20:58,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY +2017-02-18 06:20:58,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:20:58,407 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747 +2017-02-18 06:20:58,429 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:20:58,431 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:20:58,432 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:20:58,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:20:58,520 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:20:59,614 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit +2017-02-18 06:20:59,616 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk +2017-02-18 06:20:59,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:20:59,629 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:20:59,632 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:20:59,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:20:59,699 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 06:21:01,420 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 06:21:01,442 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:01,443 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000000_0 is allowed to commit now +2017-02-18 06:21:01,444 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000000 +2017-02-18 06:21:01,458 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:01,459 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000000_0' done. 
+2017-02-18 06:21:01,459 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000000_0 +2017-02-18 06:21:01,467 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000001_0 +2017-02-18 06:21:01,476 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:01,477 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:01,490 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1798ca83 +2017-02-18 06:21:01,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:01,518 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:01,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY +2017-02-18 06:21:01,604 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:01,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898 +2017-02-18 06:21:01,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:01,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:01,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:01,608 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:01,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes +2017-02-18 06:21:01,686 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% +2017-02-18 06:21:02,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk +2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:02,484 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes +2017-02-18 06:21:02,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:03,660 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000001_0 is done. And is in the process of committing +2017-02-18 06:21:03,694 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:03,696 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000001_0 is allowed to commit now +2017-02-18 06:21:03,702 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000001 +2017-02-18 06:21:03,708 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:03,714 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000001_0' done. +2017-02-18 06:21:03,715 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000001_0 +2017-02-18 06:21:03,716 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000002_0 +2017-02-18 06:21:03,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:03,728 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:03,729 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@29d1cb0d +2017-02-18 06:21:03,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:03,757 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:03,776 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY +2017-02-18 06:21:03,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:03,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435 +2017-02-18 06:21:03,814 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:03,815 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:03,815 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:03,819 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:03,819 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes +2017-02-18 06:21:04,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:04,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk +2017-02-18 06:21:04,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:04,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:04,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes +2017-02-18 06:21:04,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:04,693 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20% +2017-02-18 06:21:04,762 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000002_0 is done. And is in the process of committing +2017-02-18 06:21:04,780 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:04,785 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000002_0 is allowed to commit now +2017-02-18 06:21:04,792 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000002 +2017-02-18 06:21:04,798 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:04,807 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000002_0' done. 
+2017-02-18 06:21:04,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000002_0 +2017-02-18 06:21:04,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000003_0 +2017-02-18 06:21:04,822 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:04,823 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:04,824 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@581b236 +2017-02-18 06:21:04,838 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:04,857 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:04,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY +2017-02-18 06:21:04,898 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:04,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896 +2017-02-18 06:21:04,904 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:04,905 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:04,906 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:04,908 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:04,909 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes +2017-02-18 06:21:05,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:05,232 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk +2017-02-18 06:21:05,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:05,235 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:05,236 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes +2017-02-18 06:21:05,237 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:05,589 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000003_0 is done. And is in the process of committing +2017-02-18 06:21:05,597 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:05,602 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000003_0 is allowed to commit now +2017-02-18 06:21:05,608 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000003 +2017-02-18 06:21:05,619 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:05,620 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000003_0' done. +2017-02-18 06:21:05,621 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000003_0 +2017-02-18 06:21:05,622 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000004_0 +2017-02-18 06:21:05,634 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:05,635 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:05,635 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@731e0de +2017-02-18 06:21:05,659 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:05,672 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:05,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY +2017-02-18 06:21:05,695 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 06:21:05,725 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:05,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904 +2017-02-18 06:21:05,738 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:05,744 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:05,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:05,745 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:05,746 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes +2017-02-18 06:21:06,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:06,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk +2017-02-18 06:21:06,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:06,179 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:06,180 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes +2017-02-18 06:21:06,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:06,683 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000004_0 is done. And is in the process of committing +2017-02-18 06:21:06,699 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:06,702 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40% +2017-02-18 06:21:06,712 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000004_0 is allowed to commit now +2017-02-18 06:21:06,716 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000004 +2017-02-18 06:21:06,720 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:06,725 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000004_0' done. 
+2017-02-18 06:21:06,726 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000004_0 +2017-02-18 06:21:06,727 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000005_0 +2017-02-18 06:21:06,739 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:06,747 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:06,760 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a83e27 +2017-02-18 06:21:06,774 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:06,790 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:06,799 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY +2017-02-18 06:21:06,851 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:06,851 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472 +2017-02-18 06:21:06,852 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:06,853 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:06,853 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:06,855 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:06,855 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes +2017-02-18 06:21:07,146 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:07,148 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk +2017-02-18 06:21:07,150 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:07,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:07,153 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes +2017-02-18 06:21:07,157 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:07,479 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000005_0 is done. And is in the process of committing +2017-02-18 06:21:07,497 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:07,510 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000005_0 is allowed to commit now +2017-02-18 06:21:07,513 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000005 +2017-02-18 06:21:07,518 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:07,524 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000005_0' done. +2017-02-18 06:21:07,525 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000005_0 +2017-02-18 06:21:07,526 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000006_0 +2017-02-18 06:21:07,543 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:07,544 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:07,545 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cb9c5d +2017-02-18 06:21:07,561 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:07,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:07,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY +2017-02-18 06:21:07,656 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:07,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418 +2017-02-18 06:21:07,657 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:07,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:07,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:07,659 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:07,659 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes +2017-02-18 06:21:07,713 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% +2017-02-18 06:21:08,052 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:08,059 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk +2017-02-18 06:21:08,061 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:08,062 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:08,062 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes +2017-02-18 06:21:08,064 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:08,497 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000006_0 is done. And is in the process of committing +2017-02-18 06:21:08,501 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:08,508 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000006_0 is allowed to commit now +2017-02-18 06:21:08,511 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000006 +2017-02-18 06:21:08,519 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:08,522 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000006_0' done. 
+2017-02-18 06:21:08,528 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000006_0 +2017-02-18 06:21:08,529 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000007_0 +2017-02-18 06:21:08,536 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:08,537 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:08,543 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5bfefcd4 +2017-02-18 06:21:08,560 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:08,572 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:08,587 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY +2017-02-18 06:21:08,598 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:08,620 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532 +2017-02-18 06:21:08,621 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:08,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:08,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:08,624 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:08,626 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes +2017-02-18 06:21:08,714 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70% +2017-02-18 06:21:08,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:08,914 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk +2017-02-18 06:21:08,916 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:08,916 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:08,917 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes +2017-02-18 06:21:08,919 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:09,223 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000007_0 is done. And is in the process of committing +2017-02-18 06:21:09,239 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:09,249 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000007_0 is allowed to commit now +2017-02-18 06:21:09,251 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000007 +2017-02-18 06:21:09,259 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:09,266 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000007_0' done. +2017-02-18 06:21:09,267 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000007_0 +2017-02-18 06:21:09,267 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000008_0 +2017-02-18 06:21:09,277 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:09,278 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:09,278 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@66a62053 +2017-02-18 06:21:09,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:09,313 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:09,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY +2017-02-18 06:21:09,348 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:09,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514 +2017-02-18 06:21:09,372 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:09,373 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:21:09,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:09,376 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:09,376 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes +2017-02-18 06:21:09,715 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80% +2017-02-18 06:21:09,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:09,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk +2017-02-18 06:21:09,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:09,886 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:09,888 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes +2017-02-18 06:21:09,890 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:10,440 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000008_0 is done. And is in the process of committing +2017-02-18 06:21:10,457 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:10,466 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000008_0 is allowed to commit now +2017-02-18 06:21:10,468 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000008 +2017-02-18 06:21:10,474 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:21:10,486 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000008_0' done. 
+2017-02-18 06:21:10,487 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000008_0 +2017-02-18 06:21:10,488 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local224597268_0001_r_000009_0 +2017-02-18 06:21:10,501 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:21:10,502 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:21:10,511 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41eab4e0 +2017-02-18 06:21:10,521 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:21:10,538 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local224597268_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:21:10,574 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local224597268_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY +2017-02-18 06:21:10,603 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local224597268_0001_m_000000_0 +2017-02-18 06:21:10,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740 +2017-02-18 06:21:10,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:21:10,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:10,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:21:10,608 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:10,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes +2017-02-18 06:21:10,716 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90% +2017-02-18 06:21:10,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit +2017-02-18 06:21:10,918 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk +2017-02-18 06:21:10,919 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:21:10,919 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:21:10,920 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes +2017-02-18 06:21:10,926 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:21:11,260 INFO org.apache.hadoop.mapred.Task: Task:attempt_local224597268_0001_r_000009_0 is done. And is in the process of committing +2017-02-18 06:21:11,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
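
(A note on the pattern above: each reduce task runs the same local-shuffle cycle. A LocalFetcher pulls that reducer's partition of the single map output into memory, the in-memory segment is merged once to disk to satisfy the reduce memory limit, read back for the final merge pass, and the task commits its output. The byte counts are internally consistent: every fetch's len is its decomp size plus a fixed 4 bytes of framing, and the ten len values 991751 + 1980902 + 1199439 + 914900 + 1165908 + 867476 + 1080422 + 821536 + 1360518 + 835744 sum to 11218596, exactly the Reduce shuffle bytes counter reported when the job ends below.)
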
+2017-02-18 06:21:11,282 INFO org.apache.hadoop.mapred.Task: Task attempt_local224597268_0001_r_000009_0 is allowed to commit now
+2017-02-18 06:21:11,298 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local224597268_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local224597268_0001_r_000009
+2017-02-18 06:21:11,300 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:21:11,303 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local224597268_0001_r_000009_0' done.
+2017-02-18 06:21:11,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local224597268_0001_r_000009_0
+2017-02-18 06:21:11,304 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 06:21:11,475 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local224597268_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+    at java.security.AccessController.doPrivileged(Native Method)
+    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+    ... 1 more
+2017-02-18 06:21:11,720 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:21:11,721 INFO org.apache.hadoop.mapreduce.Job: Job job_local224597268_0001 failed with state FAILED due to: NA
+2017-02-18 06:21:11,958 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+  File System Counters
+    FILE: Number of bytes read=192567978
+    FILE: Number of bytes written=191860141
+    FILE: Number of read operations=0
+    FILE: Number of large read operations=0
+    FILE: Number of write operations=0
+  Map-Reduce Framework
+    Map input records=124787
+    Map output records=1023444
+    Map output bytes=9171648
+    Map output materialized bytes=11218596
+    Input split bytes=116
+    Combine input records=0
+    Combine output records=0
+    Reduce input groups=67780
+    Reduce shuffle bytes=11218596
+    Reduce input records=1023444
+    Reduce output records=27
+    Spilled Records=2046888
+    Shuffled Maps =10
+    Failed Shuffles=0
+    Merged Map outputs=10
+    GC time elapsed (ms)=337
+    Total committed heap usage (bytes)=1821749248
+  Shuffle Errors
+    BAD_ID=0
+    CONNECTION=0
+    IO_ERROR=0
+    WRONG_LENGTH=0
+    WRONG_MAP=0
+    WRONG_REDUCE=0
+  File Input Format Counters
+    Bytes Read=5589889
+  File Output Format Counters
+    Bytes Written=358
+2017-02-18 06:32:37,775 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:32:40,633 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:32:40,989 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 06:32:41,025 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 06:32:43,943 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 06:32:43,999 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 06:32:44,121 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 06:32:44,304 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-02-18 06:32:45,122 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-02-18 06:32:46,719 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 06:32:46,835 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-02-18 06:32:47,295 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local961145712_0001
+2017-02-18 06:32:47,386 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-02-18 06:32:49,204 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2033279662_0001
+2017-02-18 06:32:49,880 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 06:32:49,882 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local961145712_0001
+2017-02-18 06:32:49,907 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 06:32:50,028 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:32:50,055 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 06:32:50,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 06:32:50,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_m_000000_0
+2017-02-18 06:32:50,887 INFO org.apache.hadoop.mapreduce.Job: Job job_local961145712_0001 running in uber mode : false
+2017-02-18 06:32:50,943 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 06:32:51,189 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:32:51,395 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:32:51,413 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 06:32:51,983 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 06:32:51,985 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2033279662_0001
+2017-02-18 06:32:52,029 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 06:32:52,095 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:32:52,107 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 06:32:53,025 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033279662_0001 running in uber mode : false
+2017-02-18 06:32:53,566 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 06:32:54,509 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 06:32:54,532 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:32:55,134 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:32:55,155 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:32:55,156 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:32:55,210 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:32:55,544 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:32:55,692 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:32:55,709 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:32:55,807 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 06:32:59,289 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:32:59,291 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:32:59,352 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:32:59,510 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:33:00,946 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:01,174 INFO org.apache.hadoop.mapreduce.Job: map 2% reduce 0%
+2017-02-18 06:33:03,950 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:04,184 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
+2017-02-18 06:33:04,335 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:05,264 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
+2017-02-18 06:33:06,969 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:07,196 INFO org.apache.hadoop.mapreduce.Job: map 53% reduce 0%
+2017-02-18 06:33:07,339 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:08,031 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 06:33:08,032 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 06:33:08,273 INFO org.apache.hadoop.mapreduce.Job: map 32% reduce 0%
+2017-02-18 06:33:09,913 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 06:33:09,914 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 06:33:09,971 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:10,206 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 06:33:10,340 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:11,286 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 06:33:12,972 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:13,342 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:15,973 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:16,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:18,981 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:19,347 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:21,984 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:22,348 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:33:22,816 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:33:22,846 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 06:33:22,859 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_m_000000_0' done.
+2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:22,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
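
The failure above is worth decoding before reading on. The job dies only after all ten reduce tasks have committed, at the very end of LocalJobRunner$Job.run (line 573 in the trace), which is why the log reaches map 100% reduce 100% and still ends failed with state FAILED due to: NA. The NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest means the Apache HttpComponents httpclient jar was missing from the runtime classpath; the usual remedy is to add httpclient and its httpcore dependency, shipped with the Hadoop client libraries, to the project's build path. Nothing in the map or reduce code itself is implicated.

The recurring warning "No job jar file set. User classes may not be found." is a separate and, under LocalJobRunner in an IDE, mostly benign issue: with unpackaged classes there is no jar to ship, but everything is already on the local classpath. For completeness, here is a minimal driver sketch showing the Job#setJarByClass call the warning points at. All class names are hypothetical placeholders; the repo's actual driver and mapper sources are not part of this diff.

    // Hypothetical driver sketch, not taken from the repo.
    import java.io.IOException;
    import java.util.StringTokenizer;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class Assign1Driver {
      public static class TokenMapper
          extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();
        @Override
        protected void map(LongWritable key, Text value, Context ctx)
            throws IOException, InterruptedException {
          StringTokenizer it = new StringTokenizer(value.toString());
          while (it.hasMoreTokens()) {            // emit one (token, 1) pair per word
            word.set(it.nextToken());
            ctx.write(word, ONE);
          }
        }
      }

      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "assign1");
        job.setJarByClass(Assign1Driver.class);   // addresses the jar warning once classes are packaged
        job.setMapperClass(TokenMapper.class);
        job.setNumReduceTasks(10);                // ten reduce partitions, as in this log
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }

Also note what happens next in the log: two fresh jobs, job_local961145712_0001 and job_local2033279662_0001, are submitted within seconds of each other, so their entries interleave from here on, and the job and attempt IDs are the only reliable way to tell the two runs apart.
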
+2017-02-18 06:33:23,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 06:33:23,019 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000000_0 +2017-02-18 06:33:23,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:33:23,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:33:23,158 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@44f9c0c +2017-02-18 06:33:23,244 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 06:33:23,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:33:23,391 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:33:23,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY +2017-02-18 06:33:23,798 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local961145712_0001_m_000000_0 +2017-02-18 06:33:23,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747 +2017-02-18 06:33:23,854 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:33:23,855 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:23,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:33:23,917 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:23,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:33:25,206 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 06:33:25,255 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 06:33:25,258 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 06:33:25,273 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_m_000000_0' done. +2017-02-18 06:33:25,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_m_000000_0 +2017-02-18 06:33:25,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
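
Both map attempts print identical buffer math, and all of it follows from a handful of configuration knobs. mapreduce.task.io.sort.mb: 100 allocates a 104857600-byte collection buffer (the bufvoid value), and the default spill fraction of 0.80 yields the logged soft limit: 0.80 * 104857600 = 83886080. The MergerManager line is derived the same way: maxSingleShuffleLimit is 25% of memoryLimit (0.25 * 679778688 = 169944672), mergeThreshold is about 66% of it (matching the logged 448653952 up to float rounding), memoryLimit itself is a fraction of the task's JVM heap, and ioSortFactor=10 bounds how many segments one merge pass handles. A sketch of the corresponding driver-side settings, assuming the stock Hadoop 2.x property names and default values; none of this is taken from the repo:

    // Hypothetical tuning sketch: the properties behind the buffer numbers in this log.
    import org.apache.hadoop.conf.Configuration;

    public class ShuffleTuningSketch {
      static Configuration withLoggedDefaults() {
        Configuration conf = new Configuration();
        // Map side: 100 MiB collect buffer; spill begins at 80% full.
        conf.setInt("mapreduce.task.io.sort.mb", 100);             // bufvoid = 104857600
        conf.setFloat("mapreduce.map.sort.spill.percent", 0.80f);  // soft limit = 83886080
        // Reduce side: fractions behind the MergerManager numbers above.
        conf.setFloat("mapreduce.reduce.shuffle.input.buffer.percent", 0.70f); // -> memoryLimit
        conf.setFloat("mapreduce.reduce.shuffle.memory.limit.percent", 0.25f); // -> maxSingleShuffleLimit
        conf.setFloat("mapreduce.reduce.shuffle.merge.percent", 0.66f);        // -> mergeThreshold
        conf.setInt("mapreduce.task.io.sort.factor", 10);                      // -> ioSortFactor
        return conf;
      }
    }

With a single 9171648-byte map output against an 83886080-byte soft limit, neither job ever spills early; each map performs exactly one flush-time spill, hence the single "Finished spill 0" per attempt.
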
+2017-02-18 06:33:25,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit +2017-02-18 06:33:25,294 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk +2017-02-18 06:33:25,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:33:25,296 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:25,296 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:33:25,319 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 06:33:25,326 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:25,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 06:33:25,405 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000000_0 +2017-02-18 06:33:25,433 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 06:33:25,523 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:33:25,524 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:33:25,548 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1e60a696 +2017-02-18 06:33:25,733 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:33:25,782 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:33:26,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 991747 len: 991751 to MEMORY +2017-02-18 06:33:26,269 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local2033279662_0001_m_000000_0 +2017-02-18 06:33:26,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747 +2017-02-18 06:33:26,317 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:33:26,318 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:26,325 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:33:26,376 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:26,404 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:33:27,956 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 06:33:27,995 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:33:27,995 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000000_0 is allowed to commit now +2017-02-18 06:33:27,996 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000000 +2017-02-18 06:33:28,010 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000000_0' done. +2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000000_0 +2017-02-18 06:33:28,023 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000001_0 +2017-02-18 06:33:28,049 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:33:28,050 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:33:28,050 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1eb6a3c +2017-02-18 06:33:28,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:33:28,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:33:28,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 991747 bytes to disk to satisfy reduce memory limit +2017-02-18 06:33:28,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 991751 bytes from disk +2017-02-18 06:33:28,101 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:33:28,101 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:28,102 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 991742 bytes +2017-02-18 06:33:28,102 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:28,092 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY +2017-02-18 06:33:28,141 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local961145712_0001_m_000000_0 +2017-02-18 06:33:28,162 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898 +2017-02-18 06:33:28,163 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:33:28,164 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 06:33:28,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:33:28,165 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:28,165 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes +2017-02-18 06:33:28,195 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 06:33:28,254 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% +2017-02-18 06:33:29,638 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit +2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk +2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:29,639 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes +2017-02-18 06:33:29,640 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:30,595 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 06:33:30,639 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:30,639 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000000_0 is allowed to commit now +2017-02-18 06:33:30,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000000 +2017-02-18 06:33:30,658 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000000_0' done. 
+2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000000_0 +2017-02-18 06:33:30,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000001_0 +2017-02-18 06:33:30,692 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 06:33:30,694 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 06:33:30,694 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@30eab853 +2017-02-18 06:33:30,711 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 06:33:30,755 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 06:33:30,770 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1980898 len: 1980902 to MEMORY +2017-02-18 06:33:30,796 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local2033279662_0001_m_000000_0 +2017-02-18 06:33:30,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898 +2017-02-18 06:33:30,861 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 06:33:30,862 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:30,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 06:33:30,864 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 06:33:30,864 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes +2017-02-18 06:33:31,350 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% +2017-02-18 06:33:31,388 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000001_0 is done. And is in the process of committing +2017-02-18 06:33:31,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 06:33:31,399 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000001_0 is allowed to commit now +2017-02-18 06:33:31,423 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000001 +2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000001_0' done. 
+2017-02-18 06:33:31,432 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000001_0
+2017-02-18 06:33:31,433 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000002_0
+2017-02-18 06:33:31,451 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:31,452 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:31,452 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2deeef8c
+2017-02-18 06:33:31,469 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:31,490 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:31,504 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY
+2017-02-18 06:33:31,527 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:31,552 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
+2017-02-18 06:33:31,552 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:31,553 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:31,554 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:31,555 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:31,555 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
+2017-02-18 06:33:32,112 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:32,113 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk
+2017-02-18 06:33:32,121 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:32,122 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:32,122 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
+2017-02-18 06:33:32,123 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:32,265 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 06:33:32,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1980898 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:32,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1980902 bytes from disk
+2017-02-18 06:33:32,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:32,289 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:32,289 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1980895 bytes
+2017-02-18 06:33:32,290 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:32,889 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 06:33:32,890 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:32,891 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000002_0 is allowed to commit now
+2017-02-18 06:33:32,892 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000002
+2017-02-18 06:33:32,934 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000002_0' done.
+2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000002_0
+2017-02-18 06:33:32,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000003_0
+2017-02-18 06:33:32,968 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:32,970 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:32,983 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@11a9281f
+2017-02-18 06:33:33,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:33,035 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:33,097 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY
+2017-02-18 06:33:33,114 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:33,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
+2017-02-18 06:33:33,128 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:33,130 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:33,130 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:33,131 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:33,131 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
+2017-02-18 06:33:33,268 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 06:33:33,590 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:33,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk
+2017-02-18 06:33:33,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:33,603 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:33,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
+2017-02-18 06:33:33,604 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:33,864 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 06:33:33,879 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:33,889 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000001_0 is allowed to commit now
+2017-02-18 06:33:33,891 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000001
+2017-02-18 06:33:33,902 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:33,902 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000001_0' done.
+2017-02-18 06:33:33,903 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000001_0
+2017-02-18 06:33:33,903 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000002_0
+2017-02-18 06:33:33,932 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:33,933 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:33,933 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7b4fb4eb
+2017-02-18 06:33:33,944 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:33,968 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:33,994 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1199435 len: 1199439 to MEMORY
+2017-02-18 06:33:34,020 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:34,035 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
+2017-02-18 06:33:34,036 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:34,038 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:34,039 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:34,040 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:34,040 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
+2017-02-18 06:33:34,122 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 06:33:34,137 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:34,138 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000003_0 is allowed to commit now
+2017-02-18 06:33:34,139 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000003
+2017-02-18 06:33:34,162 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:34,163 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000003_0' done.
+2017-02-18 06:33:34,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000003_0
+2017-02-18 06:33:34,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000004_0
+2017-02-18 06:33:34,186 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:34,187 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:34,197 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@15f3ca88
+2017-02-18 06:33:34,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:34,236 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:34,244 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY
+2017-02-18 06:33:34,269 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:33:34,292 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:34,293 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
+2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:34,294 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:34,296 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:34,296 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
+2017-02-18 06:33:34,361 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 06:33:34,675 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1199435 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1199439 bytes from disk
+2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:34,676 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:34,689 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1199425 bytes
+2017-02-18 06:33:34,690 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:34,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk
+2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:34,912 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:34,913 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
+2017-02-18 06:33:34,913 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:35,271 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 06:33:35,386 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 06:33:35,388 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:35,413 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000002_0 is allowed to commit now
+2017-02-18 06:33:35,415 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000002
+2017-02-18 06:33:35,416 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000002_0' done.
+2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000002_0
+2017-02-18 06:33:35,424 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000003_0
+2017-02-18 06:33:35,460 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:35,462 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:35,462 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17fdb5c7
+2017-02-18 06:33:35,473 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:35,496 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:35,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 914896 len: 914900 to MEMORY
+2017-02-18 06:33:35,552 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:35,576 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
+2017-02-18 06:33:35,577 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:35,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:35,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:35,593 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:35,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
+2017-02-18 06:33:35,693 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 06:33:35,708 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:35,724 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000004_0 is allowed to commit now
+2017-02-18 06:33:35,751 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000004
+2017-02-18 06:33:35,753 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:35,767 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000004_0' done.
+2017-02-18 06:33:35,768 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000004_0
+2017-02-18 06:33:35,768 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000005_0
+2017-02-18 06:33:35,784 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:35,785 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:35,804 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3aa101a7
+2017-02-18 06:33:35,821 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:35,846 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:35,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY
+2017-02-18 06:33:35,879 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:35,901 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
+2017-02-18 06:33:35,903 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:35,904 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:35,904 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:35,905 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:35,905 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
+2017-02-18 06:33:36,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 914896 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:36,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 914900 bytes from disk
+2017-02-18 06:33:36,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:36,091 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:36,105 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 914884 bytes
+2017-02-18 06:33:36,106 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,272 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 06:33:36,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk
+2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:36,357 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
+2017-02-18 06:33:36,364 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 06:33:36,375 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,566 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 06:33:36,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,622 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000003_0 is allowed to commit now
+2017-02-18 06:33:36,623 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000003
+2017-02-18 06:33:36,636 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:36,637 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000003_0' done.
+2017-02-18 06:33:36,637 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000003_0
+2017-02-18 06:33:36,638 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000004_0
+2017-02-18 06:33:36,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:36,653 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:36,654 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@207e7fdb
+2017-02-18 06:33:36,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:36,693 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:36,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1165904 len: 1165908 to MEMORY
+2017-02-18 06:33:36,734 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:36,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
+2017-02-18 06:33:36,735 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:36,736 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:36,739 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:36,739 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
+2017-02-18 06:33:36,793 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 06:33:36,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,819 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000005_0 is allowed to commit now
+2017-02-18 06:33:36,821 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000005
+2017-02-18 06:33:36,838 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000005_0' done.
+2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000005_0
+2017-02-18 06:33:36,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000006_0
+2017-02-18 06:33:36,861 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:36,862 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:36,862 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41d04f3b
+2017-02-18 06:33:36,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:36,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:36,931 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY
+2017-02-18 06:33:36,959 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:36,977 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
+2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:36,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:36,980 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:36,980 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
+2017-02-18 06:33:37,273 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 06:33:37,364 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 06:33:37,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1165904 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:37,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1165908 bytes from disk
+2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:37,373 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1165893 bytes
+2017-02-18 06:33:37,374 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:37,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk
+2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:37,567 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
+2017-02-18 06:33:37,568 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,122 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 06:33:38,145 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 06:33:38,147 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,147 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000004_0 is allowed to commit now
+2017-02-18 06:33:38,149 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000004
+2017-02-18 06:33:38,154 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,154 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000006_0 is allowed to commit now
+2017-02-18 06:33:38,155 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000006
+2017-02-18 06:33:38,157 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000004_0' done.
+2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000004_0
+2017-02-18 06:33:38,158 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000005_0
+2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000006_0' done.
+2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000006_0
+2017-02-18 06:33:38,170 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000007_0
+2017-02-18 06:33:38,181 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:38,182 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:38,182 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22d0ce11
+2017-02-18 06:33:38,183 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:38,184 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:38,202 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:38,203 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@57e13166
+2017-02-18 06:33:38,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:38,217 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:38,231 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:38,233 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 867472 len: 867476 to MEMORY
+2017-02-18 06:33:38,260 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY
+2017-02-18 06:33:38,271 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:38,236 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:38,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
+2017-02-18 06:33:38,280 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:38,281 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:38,282 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:38,283 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
+2017-02-18 06:33:38,275 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:33:38,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
+2017-02-18 06:33:38,295 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:38,296 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:38,298 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:38,298 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
+2017-02-18 06:33:38,427 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 06:33:38,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:38,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk
+2017-02-18 06:33:38,732 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:38,732 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:38,733 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
+2017-02-18 06:33:38,733 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:38,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 867472 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:38,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 867476 bytes from disk
+2017-02-18 06:33:38,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:38,760 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:38,766 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 867468 bytes
+2017-02-18 06:33:38,767 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:39,085 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 06:33:39,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:39,151 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000007_0 is allowed to commit now
+2017-02-18 06:33:39,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000007
+2017-02-18 06:33:39,154 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000007_0' done.
+2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000007_0
+2017-02-18 06:33:39,171 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000008_0
+2017-02-18 06:33:39,187 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:39,188 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:39,201 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@33ed55b9
+2017-02-18 06:33:39,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 06:33:39,212 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:39,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:39,225 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000005_0 is allowed to commit now
+2017-02-18 06:33:39,243 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:39,243 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000005
+2017-02-18 06:33:39,245 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000005_0' done.
+2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000005_0
+2017-02-18 06:33:39,254 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000006_0
+2017-02-18 06:33:39,267 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY
+2017-02-18 06:33:39,271 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:39,281 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:39,282 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:39,282 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cda661a
+2017-02-18 06:33:39,298 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:39,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
+2017-02-18 06:33:39,317 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:39,318 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:39,319 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:39,320 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:39,320 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
+2017-02-18 06:33:39,331 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:39,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1080418 len: 1080422 to MEMORY
+2017-02-18 06:33:39,395 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:39,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
+2017-02-18 06:33:39,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:39,398 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:39,398 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:39,399 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:39,400 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
+2017-02-18 06:33:39,428 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 06:33:39,982 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1080418 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1080422 bytes from disk
+2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:39,983 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:39,987 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1080407 bytes
+2017-02-18 06:33:39,996 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:40,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:40,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk
+2017-02-18 06:33:40,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:40,073 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:40,073 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
+2017-02-18 06:33:40,074 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:40,296 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 06:33:40,610 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 06:33:40,613 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:40,640 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000006_0 is allowed to commit now
+2017-02-18 06:33:40,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000006
+2017-02-18 06:33:40,642 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000006_0' done.
+2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000006_0
+2017-02-18 06:33:40,652 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000007_0
+2017-02-18 06:33:40,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:40,665 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:40,678 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fc514a7
+2017-02-18 06:33:40,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:40,710 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:40,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 821532 len: 821536 to MEMORY
+2017-02-18 06:33:40,768 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:40,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
+2017-02-18 06:33:40,769 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:40,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:40,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:40,772 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:40,772 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
+2017-02-18 06:33:40,839 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 06:33:40,869 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:40,880 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000008_0 is allowed to commit now
+2017-02-18 06:33:40,881 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000008
+2017-02-18 06:33:40,893 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:40,893 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000008_0' done.
+2017-02-18 06:33:40,894 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000008_0
+2017-02-18 06:33:40,910 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local961145712_0001_r_000009_0
+2017-02-18 06:33:40,924 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:40,925 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:40,926 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@75096410
+2017-02-18 06:33:40,935 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:40,963 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local961145712_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:40,978 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local961145712_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY
+2017-02-18 06:33:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local961145712_0001_m_000000_0
+2017-02-18 06:33:41,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
+2017-02-18 06:33:41,022 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:41,023 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:41,023 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:41,025 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:41,025 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
+2017-02-18 06:33:41,177 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 821532 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:41,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 821536 bytes from disk
+2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:41,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 821523 bytes
+2017-02-18 06:33:41,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:41,300 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 06:33:41,436 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
+2017-02-18 06:33:41,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk
+2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:41,449 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
+2017-02-18 06:33:41,464 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:41,642 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 06:33:41,688 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:41,689 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000007_0 is allowed to commit now
+2017-02-18 06:33:41,705 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000007
+2017-02-18 06:33:41,706 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000007_0' done.
+2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000007_0
+2017-02-18 06:33:41,721 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000008_0
+2017-02-18 06:33:41,732 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:41,733 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:41,757 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7888a8ee
+2017-02-18 06:33:41,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:41,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:41,817 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 1360514 len: 1360518 to MEMORY
+2017-02-18 06:33:41,861 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:41,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
+2017-02-18 06:33:41,862 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:41,863 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:41,863 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:41,865 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:41,865 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
+2017-02-18 06:33:41,981 INFO org.apache.hadoop.mapred.Task: Task:attempt_local961145712_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 06:33:42,006 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:42,020 INFO org.apache.hadoop.mapred.Task: Task attempt_local961145712_0001_r_000009_0 is allowed to commit now
+2017-02-18 06:33:42,021 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local961145712_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local961145712_0001_r_000009
+2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local961145712_0001_r_000009_0' done.
+2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local961145712_0001_r_000009_0
+2017-02-18 06:33:42,034 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 06:33:42,301 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:33:42,437 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 06:33:42,476 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local961145712_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 06:33:42,798 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1360514 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1360518 bytes from disk
+2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:42,799 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:42,800 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1360506 bytes
+2017-02-18 06:33:42,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:43,303 INFO org.apache.hadoop.mapreduce.Job: Job job_local961145712_0001 failed with state FAILED due to: NA
+2017-02-18 06:33:43,633 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=192567978
+		FILE: Number of bytes written=191859731
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023444
+		Map output bytes=9171648
+		Map output materialized bytes=11218596
+		Input split bytes=116
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=67780
+		Reduce shuffle bytes=11218596
+		Reduce input records=1023444
+		Reduce output records=27
+		Spilled Records=2046888
+		Shuffled Maps =10
+		Failed Shuffles=0
+		Merged Map outputs=10
+		GC time elapsed (ms)=543
+		Total committed heap usage (bytes)=1821749248
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=5589889
+	File Output Format Counters
+		Bytes Written=268
+2017-02-18 06:33:43,644 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 06:33:43,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:43,677 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000008_0 is allowed to commit now
+2017-02-18 06:33:43,681 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000008
+2017-02-18 06:33:43,691 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:43,701 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000008_0' done.
+2017-02-18 06:33:43,702 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000008_0
+2017-02-18 06:33:43,712 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2033279662_0001_r_000009_0
+2017-02-18 06:33:43,728 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:33:43,729 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:33:43,739 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1197c2dd
+2017-02-18 06:33:43,761 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:33:43,778 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2033279662_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:33:43,793 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2033279662_0001_m_000000_0 decomp: 835740 len: 835744 to MEMORY
+2017-02-18 06:33:43,820 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local2033279662_0001_m_000000_0
+2017-02-18 06:33:43,830 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
+2017-02-18 06:33:43,831 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:33:43,831 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:43,832 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:33:43,833 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:43,833 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
+2017-02-18 06:33:44,218 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 835740 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:33:44,221 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 835744 bytes from disk
+2017-02-18 06:33:44,225 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:33:44,227 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:33:44,228 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 835728 bytes
+2017-02-18 06:33:44,230 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:44,443 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 06:33:44,573 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2033279662_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 06:33:44,577 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 06:33:44,583 INFO org.apache.hadoop.mapred.Task: Task attempt_local2033279662_0001_r_000009_0 is allowed to commit now
+2017-02-18 06:33:44,590 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2033279662_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_pg100/_temporary/0/task_local2033279662_0001_r_000009
+2017-02-18 06:33:44,596 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:33:44,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2033279662_0001_r_000009_0' done.
+2017-02-18 06:33:44,602 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2033279662_0001_r_000009_0
+2017-02-18 06:33:44,602 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 06:33:44,704 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2033279662_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 06:33:45,444 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:33:45,445 INFO org.apache.hadoop.mapreduce.Job: Job job_local2033279662_0001 failed with state FAILED due to: NA
+2017-02-18 06:33:45,698 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=192567978
+		FILE: Number of bytes written=191875879
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=124787
+		Map output records=1023444
+		Map output bytes=9171648
+		Map output materialized bytes=11218596
+		Input split bytes=116
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=67780
+		Reduce shuffle bytes=11218596
+		Reduce input records=1023444
+		Reduce output records=27
+		Spilled Records=2046888
+		Shuffled Maps =10
+		Failed Shuffles=0
+		Merged Map outputs=10
+		GC time elapsed (ms)=495
+		Total committed heap usage (bytes)=1821749248
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=5589889
+	File Output Format Counters
+		Bytes Written=268
+2017-02-18 06:43:25,426 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:43:28,141 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 06:43:28,157 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 06:43:30,140 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 06:43:30,229 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 06:43:30,641 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 06:43:32,213 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local767302091_0001
+2017-02-18 06:43:34,192 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 06:43:34,193 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local767302091_0001
+2017-02-18 06:43:34,210 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 06:43:34,295 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:43:34,310 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 06:43:34,780 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 06:43:34,782 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000000_0
+2017-02-18 06:43:35,010 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:43:35,115 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:43:35,147 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 06:43:35,196 INFO org.apache.hadoop.mapreduce.Job: Job job_local767302091_0001 running in uber mode : false
+2017-02-18 06:43:35,198 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 06:43:35,853 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:43:35,866 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:43:35,867 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:43:35,868 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:43:35,868 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:43:35,890 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:43:35,944 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:43:41,093 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:43:41,228 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-18 06:43:44,095 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:43:44,240 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
+2017-02-18 06:43:47,108 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:43:47,246 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-18 06:43:50,111 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:43:50,254 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
+2017-02-18 06:43:50,504 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:43:50,511 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:43:50,512 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:43:50,513 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 06:43:50,513 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 06:43:53,112 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:43:53,262 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 06:43:56,113 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:43:59,115 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:02,117 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:05,121 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:08,125 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:11,129 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:14,132 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:24,792 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:44:24,818 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 06:44:24,830 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:44:24,837 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000000_0' done.
+2017-02-18 06:44:24,838 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000000_0
+2017-02-18 06:44:24,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000001_0
+2017-02-18 06:44:24,856 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:44:24,857 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:44:24,873 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 06:44:25,245 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:44:25,248 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:44:25,248 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:44:25,249 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:44:25,249 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:44:25,259 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:44:25,268 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:44:25,328 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:44:29,321 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 06:44:29,351 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 06:44:29,352 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 06:44:30,339 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 06:44:30,880 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:31,341 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 06:44:33,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:36,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:39,581 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:44:39,604 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 06:44:39,610 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:44:39,614 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000001_0' done.
+2017-02-18 06:44:39,615 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000001_0
+2017-02-18 06:44:39,616 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_m_000002_0
+2017-02-18 06:44:39,624 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:44:39,625 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:44:39,626 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 06:44:40,121 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:44:40,137 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:44:40,140 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:44:40,141 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:44:40,141 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:44:40,149 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:44:40,151 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:44:40,363 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:44:43,697 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 06:44:43,713 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 06:44:44,371 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 06:44:45,636 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:46,373 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 06:44:48,638 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:51,640 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:44:52,111 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:44:52,125 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 06:44:52,134 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:44:52,144 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_m_000002_0' done.
+2017-02-18 06:44:52,145 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_m_000002_0
+2017-02-18 06:44:52,146 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 06:44:52,242 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 06:44:52,243 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000000_0
+2017-02-18 06:44:52,287 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:44:52,288 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:44:52,315 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7f357760
+2017-02-18 06:44:52,388 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:44:52,421 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:44:52,466 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:44:52,702 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 991747 len: 991751 to MEMORY
+2017-02-18 06:44:52,748 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:44:52,773 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->991747
+2017-02-18 06:44:52,790 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2685875 len: 2685879 to MEMORY
+2017-02-18 06:44:52,827 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2685875 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:44:52,859 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2685875, inMemoryMapOutputs.size() -> 2, commitMemory -> 991747, usedMemory ->3677622
+2017-02-18 06:44:52,865 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 881571 len: 881575 to MEMORY
+2017-02-18 06:44:52,877 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 881571 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:44:52,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 881571, inMemoryMapOutputs.size() -> 3, commitMemory -> 3677622, usedMemory ->4559193
+2017-02-18 06:44:52,893 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:44:52,894 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:44:52,895 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:44:52,923 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:44:52,928 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4559151 bytes
+2017-02-18 06:44:55,329 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4559193 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:44:55,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4559193 bytes from disk
+2017-02-18 06:44:55,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:44:55,345 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:44:55,346 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4559175 bytes
+2017-02-18 06:44:55,351 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:44:55,403 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 06:44:58,216 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 06:44:58,249 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:44:58,257 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000000_0 is allowed to commit now
+2017-02-18 06:44:58,259 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000000
+2017-02-18 06:44:58,266 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:44:58,272 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000000_0' done.
+2017-02-18 06:44:58,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000000_0
+2017-02-18 06:44:58,274 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000001_0
+2017-02-18 06:44:58,287 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:44:58,288 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:44:58,289 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@390f65b9
+2017-02-18 06:44:58,309 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:44:58,328 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:44:58,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1980898 len: 1980902 to MEMORY
+2017-02-18 06:44:58,408 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:44:58,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:44:58,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1980898
+2017-02-18 06:44:58,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3630499 len: 3630503 to MEMORY
+2017-02-18 06:44:58,504 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3630499 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:44:58,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3630499, inMemoryMapOutputs.size() -> 2, commitMemory -> 1980898, usedMemory ->5611397
+2017-02-18 06:44:58,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 992056 len: 992060 to MEMORY
+2017-02-18 06:44:58,544 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 992056 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:44:58,547 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 992056, inMemoryMapOutputs.size() -> 3, commitMemory -> 5611397, usedMemory ->6603453
+2017-02-18 06:44:58,549 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:44:58,550 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:44:58,550 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:44:58,553 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:44:58,553 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6603444 bytes
+2017-02-18 06:44:59,409 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
+2017-02-18 06:45:01,414 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6603453 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6603453 bytes from disk
+2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:01,416 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6603446 bytes
+2017-02-18 06:45:01,424 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:04,301 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:04,421 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 06:45:04,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 06:45:04,770 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:04,771 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000001_0 is allowed to commit now
+2017-02-18 06:45:04,772 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000001
+2017-02-18 06:45:04,795 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:04,795 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000001_0' done.
+2017-02-18 06:45:04,796 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000001_0
+2017-02-18 06:45:04,796 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000002_0
+2017-02-18 06:45:04,821 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:04,823 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:04,823 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20739903
+2017-02-18 06:45:04,834 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:04,848 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:04,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1199435 len: 1199439 to MEMORY
+2017-02-18 06:45:04,894 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:04,928 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1199435
+2017-02-18 06:45:04,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 4721951 len: 4721955 to MEMORY
+2017-02-18 06:45:05,041 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4721951 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:05,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4721951, inMemoryMapOutputs.size() -> 2, commitMemory -> 1199435, usedMemory ->5921386
+2017-02-18 06:45:05,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 1178681 len: 1178685 to MEMORY
+2017-02-18 06:45:05,079 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1178681 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:05,096 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1178681, inMemoryMapOutputs.size() -> 3, commitMemory -> 5921386, usedMemory ->7100067
+2017-02-18 06:45:05,098 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:05,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:05,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:05,102 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:05,103 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7100041 bytes
+2017-02-18 06:45:07,803 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7100067 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7100067 bytes from disk
+2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:07,810 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:07,811 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7100054 bytes
+2017-02-18 06:45:07,815 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:10,294 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 06:45:10,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:10,299 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000002_0 is allowed to commit now
+2017-02-18 06:45:10,299 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000002
+2017-02-18 06:45:10,312 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:10,318 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000002_0' done.
+2017-02-18 06:45:10,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000002_0
+2017-02-18 06:45:10,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000003_0
+2017-02-18 06:45:10,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:10,329 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:10,330 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b082505
+2017-02-18 06:45:10,356 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:10,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:10,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 914896 len: 914900 to MEMORY
+2017-02-18 06:45:10,396 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:10,417 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->914896
+2017-02-18 06:45:10,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2586992 len: 2586996 to MEMORY
+2017-02-18 06:45:10,441 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:45:10,478 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2586992 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:10,481 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2586992, inMemoryMapOutputs.size() -> 2, commitMemory -> 914896, usedMemory ->3501888
+2017-02-18 06:45:10,487 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 750229 len: 750233 to MEMORY
+2017-02-18 06:45:10,523 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 750229 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:10,524 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 750229, inMemoryMapOutputs.size() -> 3, commitMemory -> 3501888, usedMemory ->4252117
+2017-02-18 06:45:10,524 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:10,525 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:10,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:10,527 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:10,527 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4252081 bytes
+2017-02-18 06:45:11,445 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 06:45:12,063 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4252117 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:12,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4252117 bytes from disk
+2017-02-18 06:45:12,067 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:12,067 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:12,069 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4252100 bytes
+2017-02-18 06:45:12,071 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:13,479 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 06:45:13,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:13,485 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000003_0 is allowed to commit now
+2017-02-18 06:45:13,486 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000003
+2017-02-18 06:45:13,501 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:13,504 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000003_0' done.
+2017-02-18 06:45:13,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000003_0
+2017-02-18 06:45:13,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000004_0
+2017-02-18 06:45:13,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:13,519 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:13,519 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7703d828
+2017-02-18 06:45:13,525 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:13,550 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:13,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1165904 len: 1165908 to MEMORY
+2017-02-18 06:45:13,576 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:13,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1165904
+2017-02-18 06:45:13,605 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3465672 len: 3465676 to MEMORY
+2017-02-18 06:45:13,684 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3465672 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:13,689 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3465672, inMemoryMapOutputs.size() -> 2, commitMemory -> 1165904, usedMemory ->4631576
+2017-02-18 06:45:13,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 962343 len: 962347 to MEMORY
+2017-02-18 06:45:13,708 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 962343 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:13,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 962343, inMemoryMapOutputs.size() -> 3, commitMemory -> 4631576, usedMemory ->5593919
+2017-02-18 06:45:13,725 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:13,726 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:13,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:13,762 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:13,763 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5593898 bytes
+2017-02-18 06:45:14,453 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 06:45:15,937 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5593919 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5593919 bytes from disk
+2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:15,939 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5593911 bytes
+2017-02-18 06:45:15,947 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:18,033 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 06:45:18,042 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:18,042 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000004_0 is allowed to commit now
+2017-02-18 06:45:18,043 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000004
+2017-02-18 06:45:18,058 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:18,061 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000004_0' done.
+2017-02-18 06:45:18,062 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000004_0
+2017-02-18 06:45:18,063 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000005_0
+2017-02-18 06:45:18,076 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:18,077 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:18,077 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7adb5354
+2017-02-18 06:45:18,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:18,109 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:18,119 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 867472 len: 867476 to MEMORY
+2017-02-18 06:45:18,130 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:18,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->867472
+2017-02-18 06:45:18,154 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2657280 len: 2657284 to MEMORY
+2017-02-18 06:45:18,208 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2657280 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:18,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2657280, inMemoryMapOutputs.size() -> 2, commitMemory -> 867472, usedMemory ->3524752
+2017-02-18 06:45:18,222 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 725836 len: 725840 to MEMORY
+2017-02-18 06:45:18,246 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 725836 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:18,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 725836, inMemoryMapOutputs.size() -> 3, commitMemory -> 3524752, usedMemory ->4250588
+2017-02-18 06:45:18,254 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:18,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:18,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:18,262 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:18,262 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4250568 bytes
+2017-02-18 06:45:18,464 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 06:45:19,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4250588 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:19,758 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4250588 bytes from disk
+2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4250579 bytes
+2017-02-18 06:45:19,759 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:21,241 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 06:45:21,245 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:21,245 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000005_0 is allowed to commit now
+2017-02-18 06:45:21,246 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000005
+2017-02-18 06:45:21,261 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:21,263 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000005_0' done.
+2017-02-18 06:45:21,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000005_0
+2017-02-18 06:45:21,265 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000006_0
+2017-02-18 06:45:21,273 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:21,274 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:21,274 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f3ed32a
+2017-02-18 06:45:21,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:21,318 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:21,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1080418 len: 1080422 to MEMORY
+2017-02-18 06:45:21,339 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:21,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1080418
+2017-02-18 06:45:21,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 3036363 len: 3036367 to MEMORY
+2017-02-18 06:45:21,442 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3036363 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:21,453 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3036363, inMemoryMapOutputs.size() -> 2, commitMemory -> 1080418, usedMemory ->4116781
+2017-02-18 06:45:21,455 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 855513 len: 855517 to MEMORY
+2017-02-18 06:45:21,472 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:45:21,482 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 855513 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:21,488 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 855513, inMemoryMapOutputs.size() -> 3, commitMemory -> 4116781, usedMemory ->4972294
+2017-02-18 06:45:21,491 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:21,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:21,493 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:21,494 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:21,494 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4972265 bytes
+2017-02-18 06:45:22,480 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 06:45:23,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4972294 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:23,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4972294 bytes from disk
+2017-02-18 06:45:23,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:23,369 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:23,370 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4972282 bytes
+2017-02-18 06:45:23,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:25,125 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 06:45:25,151 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:25,152 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000006_0 is allowed to commit now
+2017-02-18 06:45:25,154 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000006
+2017-02-18 06:45:25,159 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:25,162 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000006_0' done.
+2017-02-18 06:45:25,164 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000006_0
+2017-02-18 06:45:25,165 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000007_0
+2017-02-18 06:45:25,174 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:25,174 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:25,179 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@fb8896c
+2017-02-18 06:45:25,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:25,207 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:25,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 821532 len: 821536 to MEMORY
+2017-02-18 06:45:25,244 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:25,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->821532
+2017-02-18 06:45:25,254 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2552612 len: 2552616 to MEMORY
+2017-02-18 06:45:25,323 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2552612 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:25,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2552612, inMemoryMapOutputs.size() -> 2, commitMemory -> 821532, usedMemory ->3374144
+2017-02-18 06:45:25,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 685165 len: 685169 to MEMORY
+2017-02-18 06:45:25,349 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 685165 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:25,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 685165, inMemoryMapOutputs.size() -> 3, commitMemory -> 3374144, usedMemory ->4059309
+2017-02-18 06:45:25,367 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:25,369 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:25,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:25,372 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:25,372 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4059289 bytes
+2017-02-18 06:45:25,490 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
+2017-02-18 06:45:26,915 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4059309 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4059309 bytes from disk
+2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:26,917 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:26,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4059300 bytes
+2017-02-18 06:45:26,918 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:28,258 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 06:45:28,294 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:28,301 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000007_0 is allowed to commit now
+2017-02-18 06:45:28,303 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000007
+2017-02-18 06:45:28,313 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:28,316 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000007_0' done.
+2017-02-18 06:45:28,316 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000007_0
+2017-02-18 06:45:28,317 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000008_0
+2017-02-18 06:45:28,323 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:28,324 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:28,324 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10bd314a
+2017-02-18 06:45:28,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:28,363 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:28,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 1360514 len: 1360518 to MEMORY
+2017-02-18 06:45:28,398 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:28,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1360514
+2017-02-18 06:45:28,426 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 5197932 len: 5197936 to MEMORY
+2017-02-18 06:45:28,495 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:45:28,533 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5197932 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:28,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5197932, inMemoryMapOutputs.size() -> 2, commitMemory -> 1360514, usedMemory ->6558446
+2017-02-18 06:45:28,547 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 1500234 len: 1500238 to MEMORY
+2017-02-18 06:45:28,587 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1500234 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:28,600 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1500234, inMemoryMapOutputs.size() -> 3, commitMemory -> 6558446, usedMemory ->8058680
+2017-02-18 06:45:28,602 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:28,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:28,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:28,604 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:28,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8058660 bytes
+2017-02-18 06:45:29,500 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 06:45:31,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8058680 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8058680 bytes from disk
+2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:31,683 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:31,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8058671 bytes
+2017-02-18 06:45:31,687 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:34,340 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:34,510 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 06:45:34,682 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 06:45:34,703 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:34,712 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000008_0 is allowed to commit now
+2017-02-18 06:45:34,713 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000008
+2017-02-18 06:45:34,714 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:34,716 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000008_0' done.
+2017-02-18 06:45:34,722 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000008_0
+2017-02-18 06:45:34,724 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local767302091_0001_r_000009_0
+2017-02-18 06:45:34,734 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:45:34,735 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:45:34,743 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@63c39669
+2017-02-18 06:45:34,752 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:45:34,772 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local767302091_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:45:34,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000001_0 decomp: 835740 len: 835744 to MEMORY
+2017-02-18 06:45:34,817 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local767302091_0001_m_000001_0
+2017-02-18 06:45:34,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->835740
+2017-02-18 06:45:34,855 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000000_0 decomp: 2114762 len: 2114766 to MEMORY
+2017-02-18 06:45:34,908 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2114762 bytes from map-output for attempt_local767302091_0001_m_000000_0
+2017-02-18 06:45:34,908 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2114762, inMemoryMapOutputs.size() -> 2, commitMemory -> 835740, usedMemory ->2950502
+2017-02-18 06:45:34,924 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local767302091_0001_m_000002_0 decomp: 596065 len: 596069 to MEMORY
+2017-02-18 06:45:34,938 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 596065 bytes from map-output for attempt_local767302091_0001_m_000002_0
+2017-02-18 06:45:34,943 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 596065, inMemoryMapOutputs.size() -> 3, commitMemory -> 2950502, usedMemory ->3546567
+2017-02-18 06:45:34,947 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:45:34,949 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:34,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:45:34,950 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:45:34,950 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3546539 bytes
+2017-02-18 06:45:36,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3546567 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3546567 bytes from disk
+2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:45:36,160 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3546555 bytes
+2017-02-18 06:45:36,161 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:37,296 INFO org.apache.hadoop.mapred.Task: Task:attempt_local767302091_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 06:45:37,336 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:45:37,340 INFO org.apache.hadoop.mapred.Task: Task attempt_local767302091_0001_r_000009_0 is allowed to commit now
+2017-02-18 06:45:37,342 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local767302091_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local767302091_0001_r_000009
+2017-02-18 06:45:37,348 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:45:37,351 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local767302091_0001_r_000009_0' done.
+2017-02-18 06:45:37,351 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local767302091_0001_r_000009_0
+2017-02-18 06:45:37,354 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 06:45:37,515 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:45:37,562 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local767302091_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 06:45:38,520 INFO org.apache.hadoop.mapreduce.Job: Job job_local767302091_0001 failed with state FAILED due to: NA
+2017-02-18 06:45:38,725 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=921244792
+		FILE: Number of bytes written=961214284
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=52996307
+		Input split bytes=351
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=217527
+		Reduce shuffle bytes=52996307
+		Reduce input records=4678719
+		Reduce output records=124
+		Spilled Records=9357438
+		Shuffled Maps =30
+		Failed Shuffles=0
+		Merged Map outputs=30
+		GC time elapsed (ms)=912
+		Total committed heap usage (bytes)=2551959552
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=896
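Note on the failure above: every map and reduce task of job_local767302091_0001 completed and committed its output, yet LocalJobRunner marks the job FAILED because its job thread dies with NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest, i.e. the Apache HttpComponents httpclient jar was not on the runtime classpath. Together with the earlier "No job jar file set. User classes may not be found." warning, this suggests the driver was launched without a packaged job jar (for example, run directly from the IDE). The counters are still internally consistent: Shuffled Maps = 30 is 3 map outputs fetched by each of the 10 reducers, and Spilled Records = 9357438 is exactly twice the 4678719 map output records (each record is spilled once on the map side and once during the reduce-side merge to disk). A minimal driver sketch, with a hypothetical class name since the assignment's actual driver is not shown in this diff, that sets the job jar as the warning recommends:

    // Hedged sketch, not the assignment's real code; class and argument
    // names are hypothetical. setJarByClass() addresses the "No job jar
    // file set" warning, and launching with `hadoop jar assign1.jar
    // Assign1Driver <in> <out>` puts Hadoop's bundled dependency jars
    // (including httpclient) on the classpath.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class Assign1Driver {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Assign1");
            job.setJarByClass(Assign1Driver.class); // ship user classes with the job
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }

The second run below (job_local1538746324_0001, starting 06:53) processes the same kind of input, now with three files and three splits, and proceeds past the point where the first run failed.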
+2017-02-18 06:53:36,039 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 06:53:38,565 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 06:53:38,610 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 06:53:40,517 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 06:53:40,600 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 06:53:41,038 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 06:53:42,458 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1538746324_0001
+2017-02-18 06:53:44,381 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 06:53:44,382 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1538746324_0001
+2017-02-18 06:53:44,411 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 06:53:44,463 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:53:44,480 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 06:53:44,911 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 06:53:44,913 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:53:45,156 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:53:45,269 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:53:45,281 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 06:53:45,415 INFO org.apache.hadoop.mapreduce.Job: Job job_local1538746324_0001 running in uber mode : false
+2017-02-18 06:53:45,418 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 06:53:45,986 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:53:46,017 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:53:46,018 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:53:46,019 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:53:46,019 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:53:46,045 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:53:46,086 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:53:51,226 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:53:51,439 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-18 06:53:54,228 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:53:54,444 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
+2017-02-18 06:53:57,233 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:53:57,455 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
+2017-02-18 06:54:00,236 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:54:00,467 INFO org.apache.hadoop.mapreduce.Job: map 20% reduce 0%
+2017-02-18 06:54:01,451 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 06:54:01,452 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 06:54:03,238 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:03,487 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 06:54:06,239 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:09,241 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:12,248 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:15,252 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:18,259 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:21,260 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:24,265 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:27,266 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:38,774 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:54:38,815 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 06:54:38,836 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:54:38,849 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000000_0' done.
+2017-02-18 06:54:38,849 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:54:38,850 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:54:38,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:54:38,860 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:54:38,880 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 06:54:39,218 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:54:39,220 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:54:39,221 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:54:39,228 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:54:39,233 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:54:39,608 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:54:43,504 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 06:54:43,523 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 06:54:43,525 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 06:54:43,638 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 06:54:44,896 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:45,644 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 06:54:47,899 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:50,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:54:53,856 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:54:53,866 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 06:54:53,881 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:54:53,886 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000001_0' done.
+2017-02-18 06:54:53,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:54:53,888 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:54:53,896 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:54:53,897 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:54:53,906 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 06:54:54,305 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 06:54:54,327 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 06:54:54,328 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 06:54:54,329 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 06:54:54,329 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 06:54:54,339 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 06:54:54,342 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 06:54:54,668 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:54:58,024 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 06:54:58,036 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 06:54:58,037 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 06:54:58,680 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 06:54:59,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:55:00,688 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 06:55:02,919 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:55:05,920 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 06:55:06,114 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 06:55:06,133 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 06:55:06,139 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 06:55:06,145 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_m_000002_0' done.
+2017-02-18 06:55:06,148 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:06,150 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 06:55:06,268 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 06:55:06,277 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000000_0
+2017-02-18 06:55:06,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:06,361 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:06,394 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@41350279
+2017-02-18 06:55:06,569 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:06,596 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:06,720 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 06:55:06,910 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 881571 len: 881575 to MEMORY
+2017-02-18 06:55:07,005 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 881571 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:07,025 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 881571, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->881571
+2017-02-18 06:55:07,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 991747 len: 991751 to MEMORY
+2017-02-18 06:55:07,062 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 991747 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:07,084 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 991747, inMemoryMapOutputs.size() -> 2, commitMemory -> 881571, usedMemory ->1873318
+2017-02-18 06:55:07,097 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2685875 len: 2685879 to MEMORY
+2017-02-18 06:55:07,203 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2685875 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:07,214 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2685875, inMemoryMapOutputs.size() -> 3, commitMemory -> 1873318, usedMemory ->4559193
+2017-02-18 06:55:07,218 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:07,220 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:07,221 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:07,284 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:07,285 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4559151 bytes
+2017-02-18 06:55:09,501 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4559193 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:09,503 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4559193 bytes from disk
+2017-02-18 06:55:09,514 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:09,517 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:09,518 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4559175 bytes
+2017-02-18 06:55:09,526 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:09,675 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 06:55:12,380 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:12,495 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 06:55:12,524 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:12,525 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000000_0 is allowed to commit now
+2017-02-18 06:55:12,531 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000000
+2017-02-18 06:55:12,540 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:12,543 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000000_0' done.
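Aside on the MergerManager line that opens every reduce task in both runs: under stock Hadoop 2.x defaults these limits appear to be fixed fractions of the reducer heap (an assumption from mapred-default.xml; the heap size itself is only back-computed from this log). memoryLimit = heap x mapreduce.reduce.shuffle.input.buffer.percent (0.70), maxSingleShuffleLimit = memoryLimit x mapreduce.reduce.shuffle.memory.limit.percent (0.25), and mergeThreshold = memoryLimit x mapreduce.reduce.shuffle.merge.percent (0.66). A small sketch reproducing the logged numbers from those fractions:

    // Illustrative only: mirrors the float arithmetic the logged values imply.
    public class ShuffleLimits {
        public static void main(String[] args) {
            long memoryLimit = 679778688L;                    // from the log
            long maxSingle   = (long) (memoryLimit * 0.25f);  // 169944672
            long mergeThresh = (long) (memoryLimit * 0.66f);  // 448653952
            System.out.println(maxSingle + " " + mergeThresh);
        }
    }

Every map output shuffled in this section (the largest decomp is 5197932 bytes) stays well below maxSingleShuffleLimit, which is why each fetch goes "to MEMORY" rather than to disk.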
+2017-02-18 06:55:12,544 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000000_0
+2017-02-18 06:55:12,544 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000001_0
+2017-02-18 06:55:12,575 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:12,576 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:12,576 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5ba21282
+2017-02-18 06:55:12,585 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:12,599 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:12,609 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 992056 len: 992060 to MEMORY
+2017-02-18 06:55:12,636 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 992056 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:12,669 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 992056, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->992056
+2017-02-18 06:55:12,687 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1980898 len: 1980902 to MEMORY
+2017-02-18 06:55:12,726 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1980898 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:12,736 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:55:12,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1980898, inMemoryMapOutputs.size() -> 2, commitMemory -> 992056, usedMemory ->2972954
+2017-02-18 06:55:12,754 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3630499 len: 3630503 to MEMORY
+2017-02-18 06:55:12,907 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3630499 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:12,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3630499, inMemoryMapOutputs.size() -> 3, commitMemory -> 2972954, usedMemory ->6603453
+2017-02-18 06:55:12,911 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:12,913 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:12,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:12,914 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:12,915 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6603444 bytes
+2017-02-18 06:55:13,751 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
+2017-02-18 06:55:15,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6603453 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6603453 bytes from disk
+2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:15,510 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6603446 bytes
+2017-02-18 06:55:15,511 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:18,589 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:18,762 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 06:55:18,944 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 06:55:18,946 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:18,957 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000001_0 is allowed to commit now
+2017-02-18 06:55:18,959 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000001
+2017-02-18 06:55:18,967 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:18,969 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000001_0' done.
+2017-02-18 06:55:18,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000001_0
+2017-02-18 06:55:18,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000002_0
+2017-02-18 06:55:18,999 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:19,000 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:19,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2455dbb6
+2017-02-18 06:55:19,016 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:19,037 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:19,050 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 1178681 len: 1178685 to MEMORY
+2017-02-18 06:55:19,075 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1178681 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:19,109 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1178681, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1178681
+2017-02-18 06:55:19,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1199435 len: 1199439 to MEMORY
+2017-02-18 06:55:19,156 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1199435 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:19,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1199435, inMemoryMapOutputs.size() -> 2, commitMemory -> 1178681, usedMemory ->2378116
+2017-02-18 06:55:19,186 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 4721951 len: 4721955 to MEMORY
+2017-02-18 06:55:19,372 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4721951 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:19,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4721951, inMemoryMapOutputs.size() -> 3, commitMemory -> 2378116, usedMemory ->7100067
+2017-02-18 06:55:19,393 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:19,394 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:19,394 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:19,398 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:19,398 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7100041 bytes
+2017-02-18 06:55:22,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7100067 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:22,371 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7100067 bytes from disk
+2017-02-18 06:55:22,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:22,388 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:22,389 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7100054 bytes
+2017-02-18 06:55:22,389 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:25,015 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:25,784 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 29%
+2017-02-18 06:55:26,062 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 06:55:26,095 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:26,101 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000002_0 is allowed to commit now
+2017-02-18 06:55:26,103 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000002
+2017-02-18 06:55:26,111 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:26,113 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000002_0' done.
+2017-02-18 06:55:26,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000002_0
+2017-02-18 06:55:26,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000003_0
+2017-02-18 06:55:26,127 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:26,128 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:26,128 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5602c43c
+2017-02-18 06:55:26,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:26,159 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:26,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 750229 len: 750233 to MEMORY
+2017-02-18 06:55:26,184 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 750229 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:26,208 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 750229, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->750229
+2017-02-18 06:55:26,231 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 914896 len: 914900 to MEMORY
+2017-02-18 06:55:26,235 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 914896 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:26,258 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 914896, inMemoryMapOutputs.size() -> 2, commitMemory -> 750229, usedMemory ->1665125
+2017-02-18 06:55:26,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2586992 len: 2586996 to MEMORY
+2017-02-18 06:55:26,346 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2586992 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:26,376 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2586992, inMemoryMapOutputs.size() -> 3, commitMemory -> 1665125, usedMemory ->4252117
+2017-02-18 06:55:26,377 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:26,378 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:26,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:26,379 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:26,380 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4252081 bytes
+2017-02-18 06:55:26,788 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 06:55:27,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4252117 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4252117 bytes from disk
+2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:27,990 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:27,991 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4252100 bytes
+2017-02-18 06:55:27,991 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:29,337 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 06:55:29,345 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:29,345 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000003_0 is allowed to commit now
+2017-02-18 06:55:29,346 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000003
+2017-02-18 06:55:29,359 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:29,364 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000003_0' done.
+2017-02-18 06:55:29,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000003_0
+2017-02-18 06:55:29,366 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000004_0
+2017-02-18 06:55:29,378 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:29,379 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:29,380 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ee007c9
+2017-02-18 06:55:29,394 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:29,415 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:29,427 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 962343 len: 962347 to MEMORY
+2017-02-18 06:55:29,458 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 962343 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:29,467 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 962343, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->962343
+2017-02-18 06:55:29,471 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1165904 len: 1165908 to MEMORY
+2017-02-18 06:55:29,513 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1165904 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:29,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1165904, inMemoryMapOutputs.size() -> 2, commitMemory -> 962343, usedMemory ->2128247
+2017-02-18 06:55:29,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3465672 len: 3465676 to MEMORY
+2017-02-18 06:55:29,667 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3465672 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:29,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3465672, inMemoryMapOutputs.size() -> 3, commitMemory -> 2128247, usedMemory ->5593919
+2017-02-18 06:55:29,680 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:29,681 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:29,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:29,685 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:29,685 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5593898 bytes
+2017-02-18 06:55:29,797 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 06:55:31,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5593919 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5593919 bytes from disk
+2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:31,706 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5593911 bytes
+2017-02-18 06:55:31,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:33,710 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 06:55:33,726 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:33,728 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000004_0 is allowed to commit now
+2017-02-18 06:55:33,730 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000004
+2017-02-18 06:55:33,735 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:33,741 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000004_0' done.
+2017-02-18 06:55:33,742 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000004_0
+2017-02-18 06:55:33,743 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000005_0
+2017-02-18 06:55:33,750 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:33,751 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:33,752 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51080bce
+2017-02-18 06:55:33,765 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:33,784 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:33,805 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 725836 len: 725840 to MEMORY
+2017-02-18 06:55:33,809 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:55:33,815 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 725836 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:33,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 725836, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->725836
+2017-02-18 06:55:33,831 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 867472 len: 867476 to MEMORY
+2017-02-18 06:55:33,857 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 867472 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:33,861 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 867472, inMemoryMapOutputs.size() -> 2, commitMemory -> 725836, usedMemory ->1593308
+2017-02-18 06:55:33,880 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2657280 len: 2657284 to MEMORY
+2017-02-18 06:55:33,973 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2657280 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:34,001 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2657280, inMemoryMapOutputs.size() -> 3, commitMemory -> 1593308, usedMemory ->4250588
+2017-02-18 06:55:34,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:34,004 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:34,004 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:34,006 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:34,006 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4250568 bytes
+2017-02-18 06:55:34,812 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 06:55:35,359 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4250588 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:35,361 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4250588 bytes from disk
+2017-02-18 06:55:35,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:35,366 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:35,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4250579 bytes
+2017-02-18 06:55:35,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:36,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 06:55:36,771 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:36,771 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000005_0 is allowed to commit now
+2017-02-18 06:55:36,783 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000005
+2017-02-18 06:55:36,787 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:36,793 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000005_0' done.
+2017-02-18 06:55:36,794 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000005_0
+2017-02-18 06:55:36,794 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000006_0
+2017-02-18 06:55:36,803 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:36,804 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:36,808 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@17d602ac
+2017-02-18 06:55:36,815 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:55:36,831 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:36,847 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:36,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 855513 len: 855517 to MEMORY
+2017-02-18 06:55:36,868 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 855513 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:36,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 855513, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->855513
+2017-02-18 06:55:36,897 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1080418 len: 1080422 to MEMORY
+2017-02-18 06:55:36,939 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1080418 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:36,943 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1080418, inMemoryMapOutputs.size() -> 2, commitMemory -> 855513, usedMemory ->1935931
+2017-02-18 06:55:36,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 3036363 len: 3036367 to MEMORY
+2017-02-18 06:55:37,056 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3036363 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:37,072 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3036363, inMemoryMapOutputs.size() -> 3, commitMemory -> 1935931, usedMemory ->4972294
+2017-02-18 06:55:37,073 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:37,074 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:37,075 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:37,077 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:37,078 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4972265 bytes
+2017-02-18 06:55:37,818 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 06:55:38,744 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4972294 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:38,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4972294 bytes from disk
+2017-02-18 06:55:38,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:38,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:38,755 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4972282 bytes
+2017-02-18 06:55:38,757 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:40,406 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 06:55:40,413 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:40,413 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000006_0 is allowed to commit now
+2017-02-18 06:55:40,414 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000006
+2017-02-18 06:55:40,427 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:40,430 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000006_0' done.
+2017-02-18 06:55:40,431 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000006_0
+2017-02-18 06:55:40,432 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000007_0
+2017-02-18 06:55:40,440 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:40,441 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:40,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4258f598
+2017-02-18 06:55:40,457 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 685165 len: 685169 to MEMORY
+2017-02-18 06:55:40,501 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 685165 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:40,513 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 685165, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->685165
+2017-02-18 06:55:40,539 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 821532 len: 821536 to MEMORY
+2017-02-18 06:55:40,566 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 821532 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:40,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 821532, inMemoryMapOutputs.size() -> 2, commitMemory -> 685165, usedMemory ->1506697
+2017-02-18 06:55:40,592 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2552612 len: 2552616 to MEMORY
+2017-02-18 06:55:40,694 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2552612 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:40,721 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2552612, inMemoryMapOutputs.size() -> 3, commitMemory -> 1506697, usedMemory ->4059309
+2017-02-18 06:55:40,724 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:40,725 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:40,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:40,727 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:40,727 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4059289 bytes
+2017-02-18 06:55:40,824 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
+2017-02-18 06:55:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4059309 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4059309 bytes from disk
+2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:42,173 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:42,174 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4059300 bytes
+2017-02-18 06:55:42,174 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:43,477 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 06:55:43,510 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:43,514 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000007_0 is allowed to commit now
+2017-02-18 06:55:43,515 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000007
+2017-02-18 06:55:43,522 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:43,526 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000007_0' done.
+2017-02-18 06:55:43,528 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000007_0
+2017-02-18 06:55:43,531 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000008_0
+2017-02-18 06:55:43,541 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:43,542 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:43,549 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a3779e
+2017-02-18 06:55:43,562 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:43,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:43,602 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 1500234 len: 1500238 to MEMORY
+2017-02-18 06:55:43,638 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1500234 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:43,663 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1500234, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1500234
+2017-02-18 06:55:43,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 1360514 len: 1360518 to MEMORY
+2017-02-18 06:55:43,709 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1360514 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:43,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1360514, inMemoryMapOutputs.size() -> 2, commitMemory -> 1500234, usedMemory ->2860748
+2017-02-18 06:55:43,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 5197932 len: 5197936 to MEMORY
+2017-02-18 06:55:43,829 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:55:43,987 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5197932 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:43,995 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5197932, inMemoryMapOutputs.size() -> 3, commitMemory -> 2860748, usedMemory ->8058680
+2017-02-18 06:55:43,997 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:43,998 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:43,998 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:44,001 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:44,001 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8058660 bytes
+2017-02-18 06:55:44,838 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 06:55:46,940 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8058680 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8058680 bytes from disk
+2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:46,942 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:46,943 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8058671 bytes
+2017-02-18 06:55:46,943 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:49,562 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:49,834 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 06:55:49,851 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:49,852 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 06:55:49,854 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000008_0 is allowed to commit now
+2017-02-18 06:55:49,859 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000008
+2017-02-18 06:55:49,872 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:49,875 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000008_0' done.
+2017-02-18 06:55:49,876 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000008_0
+2017-02-18 06:55:49,876 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1538746324_0001_r_000009_0
+2017-02-18 06:55:49,885 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 06:55:49,885 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 06:55:49,886 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@552a2a7d
+2017-02-18 06:55:49,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 06:55:49,910 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1538746324_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 06:55:49,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000002_0 decomp: 596065 len: 596069 to MEMORY
+2017-02-18 06:55:49,964 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 596065 bytes from map-output for attempt_local1538746324_0001_m_000002_0
+2017-02-18 06:55:49,966 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 596065, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->596065
+2017-02-18 06:55:49,981 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000001_0 decomp: 835740 len: 835744 to MEMORY
+2017-02-18 06:55:50,026 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 835740 bytes from map-output for attempt_local1538746324_0001_m_000001_0
+2017-02-18 06:55:50,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 835740, inMemoryMapOutputs.size() -> 2, commitMemory -> 596065, usedMemory ->1431805
+2017-02-18 06:55:50,046 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1538746324_0001_m_000000_0 decomp: 2114762 len: 2114766 to MEMORY
+2017-02-18 06:55:50,167 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2114762 bytes from map-output for attempt_local1538746324_0001_m_000000_0
+2017-02-18 06:55:50,184 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2114762, inMemoryMapOutputs.size() -> 3, commitMemory -> 1431805, usedMemory ->3546567
+2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:50,185 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 06:55:50,187 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 06:55:50,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3546539 bytes
+2017-02-18 06:55:51,273 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3546567 bytes to disk to satisfy reduce memory limit
+2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3546567 bytes from disk
+2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 06:55:51,275 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 06:55:51,276 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3546555 bytes
+2017-02-18 06:55:51,276 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:52,418 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1538746324_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 06:55:52,426 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 06:55:52,426 INFO org.apache.hadoop.mapred.Task: Task attempt_local1538746324_0001_r_000009_0 is allowed to commit now
+2017-02-18 06:55:52,427 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1538746324_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output/_temporary/0/task_local1538746324_0001_r_000009
+2017-02-18 06:55:52,441 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 06:55:52,443 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1538746324_0001_r_000009_0' done.
+2017-02-18 06:55:52,443 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1538746324_0001_r_000009_0
+2017-02-18 06:55:52,444 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 06:55:52,659 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1538746324_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+ at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+ at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+ at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+ at java.security.AccessController.doPrivileged(Native Method)
+ at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+ at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+ at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+ at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+ ... 1 more
+2017-02-18 06:55:52,861 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 06:55:52,861 INFO org.apache.hadoop.mapreduce.Job: Job job_local1538746324_0001 failed with state FAILED due to: NA
+2017-02-18 06:55:53,172 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+ File System Counters
+  FILE: Number of bytes read=921244792
+  FILE: Number of bytes written=961235798
+  FILE: Number of read operations=0
+  FILE: Number of large read operations=0
+  FILE: Number of write operations=0
+ Map-Reduce Framework
+  Map input records=507535
+  Map output records=4678719
+  Map output bytes=43638689
+  Map output materialized bytes=52996307
+  Input split bytes=351
+  Combine input records=0
+  Combine output records=0
+  Reduce input groups=217527
+  Reduce shuffle bytes=52996307
+  Reduce input records=4678719
+  Reduce output records=124
+  Spilled Records=9357438
+  Shuffled Maps =30
+  Failed Shuffles=0
+  Merged Map outputs=30
+  GC time elapsed (ms)=817
+  Total committed heap usage (bytes)=2551959552
+ Shuffle Errors
+  BAD_ID=0
+  CONNECTION=0
+  IO_ERROR=0
+  WRONG_LENGTH=0
+  WRONG_MAP=0
+  WRONG_REDUCE=0
+ File Input Format Counters
+  Bytes Read=26057874
+ File Output Format Counters
+  Bytes Written=1326
+2017-02-18 07:01:48,434 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 07:01:50,939 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 07:01:50,976 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 07:01:53,187 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 07:01:53,292 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 07:01:53,850 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 07:01:55,706 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1461620831_0001
+2017-02-18 07:01:57,496 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 07:01:57,502 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1461620831_0001
+2017-02-18 07:01:57,521 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 07:01:57,596 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:01:57,610 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 07:01:58,116 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 07:01:58,117 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:01:58,404 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:01:58,524 INFO org.apache.hadoop.mapreduce.Job: Job job_local1461620831_0001 running in uber mode : false
+2017-02-18 07:01:58,528 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 07:01:58,537 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:01:58,566 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 07:01:59,639 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:01:59,640 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:01:59,671 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:01:59,717 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:02:04,512 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:02:04,545 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
+2017-02-18 07:02:07,525 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:02:07,555 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
+2017-02-18 07:02:10,531 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:02:10,565 INFO org.apache.hadoop.mapreduce.Job: map 12% reduce 0%
+2017-02-18 07:02:13,544 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:02:13,625 INFO org.apache.hadoop.mapreduce.Job: map 18% reduce 0%
+2017-02-18 07:02:15,477 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:02:15,479 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 07:02:15,480 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 07:02:16,545 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:16,633 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 07:02:19,549 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:22,555 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:25,558 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:28,561 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:31,563 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:34,564 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:37,566 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:40,570 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:50,701 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:02:50,770 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 07:02:50,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:02:50,808 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000000_0' done.
+2017-02-18 07:02:50,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:02:50,811 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:02:50,818 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:02:50,819 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:02:50,829 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 07:02:51,218 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:02:51,225 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:02:51,225 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:02:51,226 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:02:51,226 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:02:51,235 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:02:51,256 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:02:51,743 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:02:55,446 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 07:02:55,448 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 07:02:55,769 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 07:02:56,862 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:02:57,773 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 07:02:59,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:03:02,880 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:03:05,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:03:06,047 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:03:06,067 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 07:03:06,078 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:03:06,086 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000001_0' done.
+2017-02-18 07:03:06,086 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:06,087 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:06,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:06,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:06,110 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 07:03:06,563 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:03:06,567 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:03:06,568 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:03:06,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:03:06,577 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:03:06,582 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:03:06,592 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:03:06,803 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:03:09,718 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 07:03:09,723 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 07:03:09,808 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 07:03:12,116 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:03:12,816 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 07:03:15,123 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:03:17,874 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:03:17,897 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 07:03:17,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:03:17,914 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_m_000002_0' done.
+2017-02-18 07:03:17,915 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:17,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 07:03:18,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 07:03:18,018 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000000_0
+2017-02-18 07:03:18,087 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:18,094 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:18,147 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@956ff4d
+2017-02-18 07:03:18,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:18,338 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:18,686 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 878989 len: 878993 to MEMORY
+2017-02-18 07:03:18,769 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:18,771 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989
+2017-02-18 07:03:18,832 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:03:18,836 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 864040 len: 864044 to MEMORY
+2017-02-18 07:03:18,839 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 864040 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:18,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 864040, inMemoryMapOutputs.size() -> 2, commitMemory -> 878989, usedMemory ->1743029
+2017-02-18 07:03:18,864 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2462807 len: 2462811 to MEMORY
+2017-02-18 07:03:18,986 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2462807 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:18,987 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2462807, inMemoryMapOutputs.size() -> 3, commitMemory -> 1743029, usedMemory ->4205836
+2017-02-18 07:03:18,988 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:18,989 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:18,989 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:19,037 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:19,057 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4205810 bytes
+2017-02-18 07:03:21,717 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4205836 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:21,718 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4205836 bytes from disk
+2017-02-18 07:03:21,736 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:21,737 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:21,737 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4205821 bytes
+2017-02-18 07:03:21,738 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:21,880 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 07:03:24,107 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:24,620 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 07:03:24,634 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:24,638 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000000_0 is allowed to commit now
+2017-02-18 07:03:24,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000000
+2017-02-18 07:03:24,647 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:24,654 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000000_0' done.
+2017-02-18 07:03:24,655 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000000_0
+2017-02-18 07:03:24,656 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000001_0
+2017-02-18 07:03:24,669 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:24,670 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:24,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@22b1221b
+2017-02-18 07:03:24,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:24,697 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:24,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1021001 len: 1021005 to MEMORY
+2017-02-18 07:03:24,731 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:24,759 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001
+2017-02-18 07:03:24,781 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 2021094 len: 2021098 to MEMORY
+2017-02-18 07:03:24,835 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2021094 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:24,836 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2021094, inMemoryMapOutputs.size() -> 2, commitMemory -> 1021001, usedMemory ->3042095
+2017-02-18 07:03:24,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3748098 len: 3748102 to MEMORY
+2017-02-18 07:03:24,858 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:03:25,029 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3748098 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:25,054 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3748098, inMemoryMapOutputs.size() -> 3, commitMemory -> 3042095, usedMemory ->6790193
+2017-02-18 07:03:25,056 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:25,057 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:25,057 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:25,058 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:25,059 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6790184 bytes
+2017-02-18 07:03:25,868 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
+2017-02-18 07:03:27,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6790193 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6790193 bytes from disk
+2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:27,763 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:27,769 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6790186 bytes
+2017-02-18 07:03:27,772 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:30,546 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 07:03:30,557 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:30,557 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000001_0 is allowed to commit now
+2017-02-18 07:03:30,558 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000001
+2017-02-18 07:03:30,568 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:30,574 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000001_0' done.
+2017-02-18 07:03:30,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000001_0
+2017-02-18 07:03:30,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000002_0
+2017-02-18 07:03:30,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:30,589 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:30,590 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@16655c61
+2017-02-18 07:03:30,599 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:30,619 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:30,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1216330 len: 1216334 to MEMORY
+2017-02-18 07:03:30,658 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:30,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330
+2017-02-18 07:03:30,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1268271 len: 1268275 to MEMORY
+2017-02-18 07:03:30,698 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1268271 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:30,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1268271, inMemoryMapOutputs.size() -> 2, commitMemory -> 1216330, usedMemory ->2484601
+2017-02-18 07:03:30,734 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 4957197 len: 4957201 to MEMORY
+2017-02-18 07:03:30,879 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:03:30,959 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4957197 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:30,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4957197, inMemoryMapOutputs.size() -> 3, commitMemory -> 2484601, usedMemory ->7441798
+2017-02-18 07:03:30,973 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:30,974 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:30,974 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:30,977 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:30,977 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7441769 bytes
+2017-02-18 07:03:31,882 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 07:03:33,615 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7441798 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:33,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7441798 bytes from disk
+2017-02-18 07:03:33,623 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:33,623 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:33,629 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7441780 bytes
+2017-02-18 07:03:33,632 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:36,306 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 07:03:36,325 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:36,333 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000002_0 is allowed to commit now
+2017-02-18 07:03:36,337 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000002
+2017-02-18 07:03:36,341 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:36,343 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000002_0' done.
+2017-02-18 07:03:36,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000002_0
+2017-02-18 07:03:36,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000003_0
+2017-02-18 07:03:36,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:36,364 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:36,364 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@63cf2179
+2017-02-18 07:03:36,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:36,396 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:36,410 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 686549 len: 686553 to MEMORY
+2017-02-18 07:03:36,418 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:36,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549
+2017-02-18 07:03:36,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 947339 len: 947343 to MEMORY
+2017-02-18 07:03:36,490 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 947339 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:36,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 947339, inMemoryMapOutputs.size() -> 2, commitMemory -> 686549, usedMemory ->1633888
+2017-02-18 07:03:36,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2553004 len: 2553008 to MEMORY
+2017-02-18 07:03:36,707 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2553004 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:36,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2553004, inMemoryMapOutputs.size() -> 3, commitMemory -> 1633888, usedMemory ->4186892
+2017-02-18 07:03:36,721 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:36,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:36,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:36,728 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:36,728 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4186872 bytes
+2017-02-18 07:03:36,892 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 07:03:38,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4186892 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:38,215 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4186892 bytes from disk
+2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4186881 bytes
+2017-02-18 07:03:38,216 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:39,608 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 07:03:39,617 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:39,617 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000003_0 is allowed to commit now
+2017-02-18 07:03:39,632 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000003
+2017-02-18 07:03:39,636 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:39,638 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000003_0' done.
+2017-02-18 07:03:39,639 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000003_0
+2017-02-18 07:03:39,639 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000004_0
+2017-02-18 07:03:39,650 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:39,652 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:39,652 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2296ff51
+2017-02-18 07:03:39,676 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:39,690 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:39,704 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 853557 len: 853561 to MEMORY
+2017-02-18 07:03:39,714 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:39,740 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557
+2017-02-18 07:03:39,755 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1002557 len: 1002561 to MEMORY
+2017-02-18 07:03:39,777 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1002557 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:39,780 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1002557, inMemoryMapOutputs.size() -> 2, commitMemory -> 853557, usedMemory ->1856114
+2017-02-18 07:03:39,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3061786 len: 3061790 to MEMORY
+2017-02-18 07:03:39,900 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:03:39,941 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3061786 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:39,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3061786, inMemoryMapOutputs.size() -> 3, commitMemory -> 1856114, usedMemory ->4917900
+2017-02-18 07:03:39,959 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:39,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:39,963 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:39,963 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4917879 bytes
+2017-02-18 07:03:40,901 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 07:03:41,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4917900 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:41,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4917900 bytes from disk
+2017-02-18 07:03:41,757 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:41,758 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:41,760 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4917892 bytes
+2017-02-18 07:03:41,762 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:43,477 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 07:03:43,485 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:43,485 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000004_0 is allowed to commit now
+2017-02-18 07:03:43,486 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000004
+2017-02-18 07:03:43,496 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:43,503 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000004_0' done.
+2017-02-18 07:03:43,504 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000004_0
+2017-02-18 07:03:43,505 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000005_0
+2017-02-18 07:03:43,510 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:43,512 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:43,521 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@347e12a
+2017-02-18 07:03:43,529 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:43,550 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:43,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 767280 len: 767284 to MEMORY
+2017-02-18 07:03:43,573 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:43,588 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280
+2017-02-18 07:03:43,600 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 894060 len: 894064 to MEMORY
+2017-02-18 07:03:43,638 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 894060 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:43,653 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 894060, inMemoryMapOutputs.size() -> 2, commitMemory -> 767280, usedMemory ->1661340
+2017-02-18 07:03:43,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2671328 len: 2671332 to MEMORY
+2017-02-18 07:03:43,774 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2671328 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:43,783 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2671328, inMemoryMapOutputs.size() -> 3, commitMemory -> 1661340, usedMemory ->4332668
+2017-02-18 07:03:43,788 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:43,789 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:43,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:43,793 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:43,794 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4332653 bytes
+2017-02-18 07:03:43,911 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 07:03:45,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4332668 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4332668 bytes from disk
+2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:45,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4332659 bytes
+2017-02-18 07:03:45,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:46,909 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 07:03:46,936 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:46,944 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000005_0 is allowed to commit now
+2017-02-18 07:03:46,946 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000005
+2017-02-18 07:03:46,951 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:46,956 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000005_0' done.
+2017-02-18 07:03:46,957 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000005_0
+2017-02-18 07:03:46,958 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000006_0
+2017-02-18 07:03:46,973 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:46,974 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:46,974 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@befc7b6
+2017-02-18 07:03:46,980 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:46,995 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:47,012 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 967027 len: 967031 to MEMORY
+2017-02-18 07:03:47,033 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:47,043 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027
+2017-02-18 07:03:47,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1152263 len: 1152267 to MEMORY
+2017-02-18 07:03:47,090 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1152263 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:47,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1152263, inMemoryMapOutputs.size() -> 2, commitMemory -> 967027, usedMemory ->2119290
+2017-02-18 07:03:47,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 3388337 len: 3388341 to MEMORY
+2017-02-18 07:03:47,270 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3388337 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:47,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3388337, inMemoryMapOutputs.size() -> 3, commitMemory -> 2119290, usedMemory ->5507627
+2017-02-18 07:03:47,277 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:47,278 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:47,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:47,280 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:47,280 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5507601 bytes
+2017-02-18 07:03:47,922 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 07:03:49,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5507627 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:49,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5507627 bytes from disk
+2017-02-18 07:03:49,302 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:49,305 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:49,307 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5507615 bytes
+2017-02-18 07:03:49,311 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:51,240 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 07:03:51,288 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:51,293 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000006_0 is allowed to commit now
+2017-02-18 07:03:51,296 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000006
+2017-02-18 07:03:51,301 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:51,307 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000006_0' done.
+2017-02-18 07:03:51,308 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000006_0
+2017-02-18 07:03:51,309 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000007_0
+2017-02-18 07:03:51,319 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:51,321 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:51,321 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df
+2017-02-18 07:03:51,328 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:51,352 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:51,364 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 710901 len: 710905 to MEMORY
+2017-02-18 07:03:51,373 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:51,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901
+2017-02-18 07:03:51,396 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 824970 len: 824974 to MEMORY
+2017-02-18 07:03:51,441 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 824970 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:51,449 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 824970, inMemoryMapOutputs.size() -> 2, commitMemory -> 710901, usedMemory ->1535871
+2017-02-18 07:03:51,473 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 2633768 len: 2633772 to MEMORY
+2017-02-18 07:03:51,589 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2633768 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:51,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2633768, inMemoryMapOutputs.size() -> 3, commitMemory -> 1535871, usedMemory ->4169639
+2017-02-18 07:03:51,611 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:51,612 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:51,612 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:51,613 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:51,614 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4169616 bytes
+2017-02-18 07:03:51,941 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
+2017-02-18 07:03:53,067 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4169639 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4169639 bytes from disk
+2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:53,070 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:53,076 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4169630 bytes
+2017-02-18 07:03:53,078 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:54,425 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 07:03:54,436 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:54,436 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000007_0 is allowed to commit now
+2017-02-18 07:03:54,437 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000007
+2017-02-18 07:03:54,458 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:03:54,458 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000007_0' done.
+2017-02-18 07:03:54,462 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000007_0
+2017-02-18 07:03:54,462 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000008_0
+2017-02-18 07:03:54,483 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:03:54,484 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:03:54,484 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7af45a93
+2017-02-18 07:03:54,502 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:03:54,522 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:03:54,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 1462439 len: 1462443 to MEMORY
+2017-02-18 07:03:54,578 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:03:54,597 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439
+2017-02-18 07:03:54,600 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 1510887 len: 1510891 to MEMORY
+2017-02-18 07:03:54,643 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1510887 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:03:54,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1510887, inMemoryMapOutputs.size() -> 2, commitMemory -> 1462439, usedMemory ->2973326
+2017-02-18 07:03:54,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 5237666 len: 5237670 to MEMORY
+2017-02-18 07:03:54,885 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5237666 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:03:54,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5237666, inMemoryMapOutputs.size() -> 3, commitMemory -> 2973326, usedMemory ->8210992
+2017-02-18 07:03:54,899 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:03:54,900 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:03:54,900 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:03:54,912 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:03:54,912 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8210965 bytes
+2017-02-18 07:03:54,950 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 07:03:58,179 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8210992 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:03:58,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8210992 bytes from disk
+2017-02-18 07:03:58,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:03:58,182 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:03:58,191 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8210983 bytes
+2017-02-18 07:03:58,192 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:04:00,532 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:04:00,970 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 89%
+2017-02-18 07:04:01,265 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 07:04:01,282 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:04:01,284 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000008_0 is allowed to commit now
+2017-02-18 07:04:01,285 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000008
+2017-02-18 07:04:01,292 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:04:01,295 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000008_0' done.
+2017-02-18 07:04:01,295 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000008_0
+2017-02-18 07:04:01,296 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1461620831_0001_r_000009_0
+2017-02-18 07:04:01,311 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:04:01,312 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:04:01,312 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3cec8446
+2017-02-18 07:04:01,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:04:01,334 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1461620831_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:04:01,349 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000002_0 decomp: 563620 len: 563624 to MEMORY
+2017-02-18 07:04:01,373 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local1461620831_0001_m_000002_0
+2017-02-18 07:04:01,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620
+2017-02-18 07:04:01,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000001_0 decomp: 733075 len: 733079 to MEMORY
+2017-02-18 07:04:01,425 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 733075 bytes from map-output for attempt_local1461620831_0001_m_000001_0
+2017-02-18 07:04:01,430 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 733075, inMemoryMapOutputs.size() -> 2, commitMemory -> 563620, usedMemory ->1296695
+2017-02-18 07:04:01,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1461620831_0001_m_000000_0 decomp: 1935947 len: 1935951 to MEMORY
+2017-02-18 07:04:01,517 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1935947 bytes from map-output for attempt_local1461620831_0001_m_000000_0
+2017-02-18 07:04:01,535 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1935947, inMemoryMapOutputs.size() -> 3, commitMemory -> 1296695, usedMemory ->3232642
+2017-02-18 07:04:01,537 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:04:01,538 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:04:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:04:01,539 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:04:01,539 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3232620 bytes
+2017-02-18 07:04:01,973 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 07:04:02,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3232642 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:04:02,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3232642 bytes from disk
+2017-02-18 07:04:02,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:04:02,666 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:04:02,668 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3232631 bytes
+2017-02-18 07:04:02,670 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:04:03,658 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1461620831_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 07:04:03,693 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:04:03,696 INFO org.apache.hadoop.mapred.Task: Task attempt_local1461620831_0001_r_000009_0 is allowed to commit now
+2017-02-18 07:04:03,697 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1461620831_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local1461620831_0001_r_000009
+2017-02-18 07:04:03,705 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:04:03,710 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1461620831_0001_r_000009_0' done.
+2017-02-18 07:04:03,711 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1461620831_0001_r_000009_0
+2017-02-18 07:04:03,712 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 07:04:03,939 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1461620831_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 07:04:03,980 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:04:03,980 INFO org.apache.hadoop.mapreduce.Job: Job job_local1461620831_0001 failed with state FAILED due to: NA
+2017-02-18 07:04:04,376 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=919728132
+		FILE: Number of bytes written=960477658
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=52996307
+		Input split bytes=351
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=195336
+		Reduce shuffle bytes=52996307
+		Reduce input records=4678719
+		Reduce output records=119
+		Spilled Records=9357438
+		Shuffled Maps =30
+		Failed Shuffles=0
+		Merged Map outputs=30
+		GC time elapsed (ms)=926
+		Total committed heap usage (bytes)=2551959552
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=1297
+2017-02-18 07:05:58,046 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 07:06:00,716 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 07:06:00,740 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 07:06:02,868 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
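Note on the failure above: all ten reduce tasks of job_local1461620831_0001 committed their output to output_Q1.i, and only then did the LocalJobRunner job thread die with java.lang.NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest, so the job was reported FAILED "due to: NA". This points at the Apache HttpComponents httpclient jar being absent from the runtime classpath, not at the mapper or reducer code. A minimal, hypothetical preflight check (the class name ClasspathCheck and its messages are illustrative, not part of the assignment sources):

    public class ClasspathCheck {
        public static void main(String[] args) {
            try {
                // The exact class the LocalJobRunner failed to load in the log above.
                Class.forName("org.apache.http.client.methods.HttpUriRequest");
                System.out.println("httpclient is on the classpath");
            } catch (ClassNotFoundException e) {
                System.err.println("httpclient missing: add the httpclient/httpcore jars "
                        + "(e.g. via HADOOP_CLASSPATH or the project build path) and rerun");
            }
        }
    }

If the check fails, adding the httpclient and httpcore jars shipped with the Hadoop distribution to the launch classpath is one likely fix; the rerun that follows (job_local1414308832_0001) completes its reduce tasks without this error.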
+2017-02-18 07:06:02,922 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 1
+2017-02-18 07:06:03,338 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:1
+2017-02-18 07:06:04,825 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1414308832_0001
+2017-02-18 07:06:06,783 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 07:06:06,802 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1414308832_0001
+2017-02-18 07:06:06,814 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 07:06:06,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:06,976 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 07:06:07,402 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 07:06:07,403 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:07,695 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:07,806 INFO org.apache.hadoop.mapreduce.Job: Job job_local1414308832_0001 running in uber mode : false
+2017-02-18 07:06:07,810 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 07:06:07,871 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:07,877 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 07:06:08,639 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:06:08,642 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:06:08,642 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:06:08,643 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:06:08,643 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:06:08,672 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:06:08,713 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:06:13,796 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:06:13,833 INFO org.apache.hadoop.mapreduce.Job: map 27% reduce 0%
+2017-02-18 07:06:15,764 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:06:15,774 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:06:15,776 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:06:15,776 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 07:06:15,777 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 07:06:16,812 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:06:16,842 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 07:06:19,818 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:06:22,822 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:06:24,736 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:06:24,781 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 07:06:24,794 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:06:24,806 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_m_000000_0' done.
+2017-02-18 07:06:24,806 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:24,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 07:06:24,876 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:06:24,935 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 07:06:24,936 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000000_0
+2017-02-18 07:06:25,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:25,002 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:25,006 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6dbf1737
+2017-02-18 07:06:25,128 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:25,172 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:25,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 878989 len: 878993 to MEMORY
+2017-02-18 07:06:25,466 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:25,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989
+2017-02-18 07:06:25,514 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:25,516 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:25,516 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:25,557 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:25,570 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 878982 bytes
+2017-02-18 07:06:26,599 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 878989 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:26,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 878993 bytes from disk
+2017-02-18 07:06:26,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:26,603 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:26,604 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 878982 bytes
+2017-02-18 07:06:26,618 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:26,683 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 07:06:28,244 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 07:06:28,269 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:28,270 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000000_0 is allowed to commit now
+2017-02-18 07:06:28,283 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000000
+2017-02-18 07:06:28,284 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:28,284 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000000_0' done.
+2017-02-18 07:06:28,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000000_0
+2017-02-18 07:06:28,285 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000001_0
+2017-02-18 07:06:28,316 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:28,317 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:28,318 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@661ed391
+2017-02-18 07:06:28,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:28,340 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:28,347 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1021001 len: 1021005 to MEMORY
+2017-02-18 07:06:28,360 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:28,388 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001
+2017-02-18 07:06:28,389 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:28,390 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:28,390 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:28,392 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:28,393 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1020998 bytes
+2017-02-18 07:06:28,885 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10%
+2017-02-18 07:06:28,894 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1021001 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:28,896 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1021005 bytes from disk
+2017-02-18 07:06:28,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:28,900 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:28,902 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1020998 bytes
+2017-02-18 07:06:28,906 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:29,480 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 07:06:29,495 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:29,504 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000001_0 is allowed to commit now
+2017-02-18 07:06:29,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000001
+2017-02-18 07:06:29,510 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:29,521 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000001_0' done.
+2017-02-18 07:06:29,522 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000001_0
+2017-02-18 07:06:29,522 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000002_0
+2017-02-18 07:06:29,533 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:29,534 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:29,537 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70f6431f
+2017-02-18 07:06:29,557 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:29,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:29,598 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1216330 len: 1216334 to MEMORY
+2017-02-18 07:06:29,642 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:29,643 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330
+2017-02-18 07:06:29,643 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:29,644 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:29,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:29,646 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:29,646 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1216320 bytes
+2017-02-18 07:06:29,887 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20%
+2017-02-18 07:06:30,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1216330 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:30,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1216334 bytes from disk
+2017-02-18 07:06:30,176 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:30,177 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:30,177 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1216320 bytes
+2017-02-18 07:06:30,182 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:30,696 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 07:06:30,710 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:30,713 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000002_0 is allowed to commit now
+2017-02-18 07:06:30,714 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000002
+2017-02-18 07:06:30,724 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:30,727 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000002_0' done.
+2017-02-18 07:06:30,728 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000002_0
+2017-02-18 07:06:30,729 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000003_0
+2017-02-18 07:06:30,740 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:30,743 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:30,749 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51ff177c
+2017-02-18 07:06:30,766 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:30,779 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:30,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 686549 len: 686553 to MEMORY
+2017-02-18 07:06:30,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:30,817 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549
+2017-02-18 07:06:30,820 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:30,822 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:30,822 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:30,823 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:30,823 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 686541 bytes
+2017-02-18 07:06:30,892 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30%
+2017-02-18 07:06:31,114 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 686549 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:31,116 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 686553 bytes from disk
+2017-02-18 07:06:31,120 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:31,121 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:31,122 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 686541 bytes
+2017-02-18 07:06:31,124 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:31,419 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 07:06:31,438 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:31,447 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000003_0 is allowed to commit now
+2017-02-18 07:06:31,448 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000003
+2017-02-18 07:06:31,455 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:31,463 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000003_0' done.
+2017-02-18 07:06:31,464 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000003_0
+2017-02-18 07:06:31,464 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000004_0
+2017-02-18 07:06:31,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:31,474 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:31,474 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f927f23
+2017-02-18 07:06:31,482 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:31,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:31,516 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 853557 len: 853561 to MEMORY
+2017-02-18 07:06:31,534 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:31,543 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557
+2017-02-18 07:06:31,557 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:31,558 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:31,558 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:31,560 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:31,560 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 853550 bytes
+2017-02-18 07:06:31,901 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 07:06:31,947 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 853557 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:31,949 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 853561 bytes from disk
+2017-02-18 07:06:31,954 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:31,955 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:31,965 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 853550 bytes
+2017-02-18 07:06:31,965 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:32,370 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 07:06:32,384 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:32,386 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000004_0 is allowed to commit now
+2017-02-18 07:06:32,392 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000004
+2017-02-18 07:06:32,398 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:32,401 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000004_0' done.
+2017-02-18 07:06:32,401 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000004_0
+2017-02-18 07:06:32,402 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000005_0
+2017-02-18 07:06:32,413 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:32,414 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:32,414 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6d6d43cd
+2017-02-18 07:06:32,421 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:32,442 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:32,458 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 767280 len: 767284 to MEMORY
+2017-02-18 07:06:32,468 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:32,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280
+2017-02-18 07:06:32,492 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:32,493 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:32,493 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:06:32,495 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:32,495 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 767274 bytes
+2017-02-18 07:06:32,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 767280 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:06:32,802 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 767284 bytes from disk
+2017-02-18 07:06:32,803 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:06:32,804 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:06:32,809 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 767274 bytes
+2017-02-18 07:06:32,813 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:32,907 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 07:06:33,099 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 07:06:33,130 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:33,140 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000005_0 is allowed to commit now
+2017-02-18 07:06:33,142 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000005
+2017-02-18 07:06:33,150 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:33,158 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000005_0' done.
+2017-02-18 07:06:33,159 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000005_0
+2017-02-18 07:06:33,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000006_0
+2017-02-18 07:06:33,177 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:06:33,178 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:06:33,178 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@39a83e27
+2017-02-18 07:06:33,189 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:06:33,205 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:06:33,219 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 967027 len: 967031 to MEMORY
+2017-02-18 07:06:33,239 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local1414308832_0001_m_000000_0
+2017-02-18 07:06:33,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027
+2017-02-18 07:06:33,245 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:06:33,246 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied.
+2017-02-18 07:06:33,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:06:33,248 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:33,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 967018 bytes +2017-02-18 07:06:33,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 967027 bytes to disk to satisfy reduce memory limit +2017-02-18 07:06:33,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 967031 bytes from disk +2017-02-18 07:06:33,653 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:06:33,654 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:33,655 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 967018 bytes +2017-02-18 07:06:33,657 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:33,909 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% +2017-02-18 07:06:34,078 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000006_0 is done. And is in the process of committing +2017-02-18 07:06:34,090 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:34,093 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000006_0 is allowed to commit now +2017-02-18 07:06:34,098 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000006 +2017-02-18 07:06:34,103 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:06:34,105 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000006_0' done. 
+2017-02-18 07:06:34,106 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000006_0 +2017-02-18 07:06:34,107 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000007_0 +2017-02-18 07:06:34,121 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:06:34,122 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:06:34,122 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@528d9c2c +2017-02-18 07:06:34,126 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:06:34,142 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:06:34,159 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 710901 len: 710905 to MEMORY +2017-02-18 07:06:34,163 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local1414308832_0001_m_000000_0 +2017-02-18 07:06:34,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901 +2017-02-18 07:06:34,177 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:06:34,178 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:34,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:06:34,180 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:34,184 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 710893 bytes +2017-02-18 07:06:34,496 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 710901 bytes to disk to satisfy reduce memory limit +2017-02-18 07:06:34,502 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 710905 bytes from disk +2017-02-18 07:06:34,504 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:06:34,504 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:34,505 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 710893 bytes +2017-02-18 07:06:34,507 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:34,767 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000007_0 is done. And is in the process of committing +2017-02-18 07:06:34,787 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 07:06:34,797 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000007_0 is allowed to commit now +2017-02-18 07:06:34,799 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000007 +2017-02-18 07:06:34,806 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:06:34,807 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000007_0' done. +2017-02-18 07:06:34,808 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000007_0 +2017-02-18 07:06:34,809 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000008_0 +2017-02-18 07:06:34,819 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:06:34,820 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:06:34,825 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7934bf83 +2017-02-18 07:06:34,844 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:06:34,856 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:06:34,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 1462439 len: 1462443 to MEMORY +2017-02-18 07:06:34,911 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 07:06:34,929 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local1414308832_0001_m_000000_0 +2017-02-18 07:06:34,929 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439 +2017-02-18 07:06:34,930 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:06:34,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 07:06:34,931 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:06:34,933 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:34,933 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1462429 bytes +2017-02-18 07:06:35,531 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 1462439 bytes to disk to satisfy reduce memory limit +2017-02-18 07:06:35,534 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 1462443 bytes from disk +2017-02-18 07:06:35,535 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:06:35,536 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:35,537 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 1462429 bytes +2017-02-18 07:06:35,538 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:35,912 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80% +2017-02-18 07:06:36,164 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000008_0 is done. And is in the process of committing +2017-02-18 07:06:36,168 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:36,169 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000008_0 is allowed to commit now +2017-02-18 07:06:36,169 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000008 +2017-02-18 07:06:36,186 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:06:36,189 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000008_0' done. 
+2017-02-18 07:06:36,191 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000008_0 +2017-02-18 07:06:36,192 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1414308832_0001_r_000009_0 +2017-02-18 07:06:36,202 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:06:36,203 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:06:36,207 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4fa16b41 +2017-02-18 07:06:36,220 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:06:36,242 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1414308832_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:06:36,259 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1414308832_0001_m_000000_0 decomp: 563620 len: 563624 to MEMORY +2017-02-18 07:06:36,265 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local1414308832_0001_m_000000_0 +2017-02-18 07:06:36,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620 +2017-02-18 07:06:36,290 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:06:36,292 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:36,292 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 1 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:06:36,293 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:36,293 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 563614 bytes +2017-02-18 07:06:36,505 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 1 segments, 563620 bytes to disk to satisfy reduce memory limit +2017-02-18 07:06:36,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 563624 bytes from disk +2017-02-18 07:06:36,512 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:06:36,512 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:06:36,513 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 563614 bytes +2017-02-18 07:06:36,515 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. +2017-02-18 07:06:36,738 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1414308832_0001_r_000009_0 is done. And is in the process of committing +2017-02-18 07:06:36,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 1 / 1 copied. 
+2017-02-18 07:06:36,757 INFO org.apache.hadoop.mapred.Task: Task attempt_local1414308832_0001_r_000009_0 is allowed to commit now
+2017-02-18 07:06:36,758 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1414308832_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i2/_temporary/0/task_local1414308832_0001_r_000009
+2017-02-18 07:06:36,767 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:06:36,770 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1414308832_0001_r_000009_0' done.
+2017-02-18 07:06:36,773 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1414308832_0001_r_000009_0
+2017-02-18 07:06:36,774 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 07:06:36,916 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:06:36,937 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1414308832_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+    at java.security.AccessController.doPrivileged(Native Method)
+    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+    ... 1 more
+2017-02-18 07:06:37,921 INFO org.apache.hadoop.mapreduce.Job: Job job_local1414308832_0001 failed with state FAILED due to: NA
+2017-02-18 07:06:38,139 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+    File System Counters
+        FILE: Number of bytes read=151156389
+        FILE: Number of bytes written=154418301
+        FILE: Number of read operations=0
+        FILE: Number of large read operations=0
+        FILE: Number of write operations=0
+    Map-Reduce Framework
+        Map input records=80476
+        Map output records=793081
+        Map output bytes=7541511
+        Map output materialized bytes=9127733
+        Input split bytes=118
+        Combine input records=0
+        Combine output records=0
+        Reduce input groups=41775
+        Reduce shuffle bytes=9127733
+        Reduce input records=793081
+        Reduce output records=26
+        Spilled Records=1586162
+        Shuffled Maps =10
+        Failed Shuffles=0
+        Merged Map outputs=10
+        GC time elapsed (ms)=261
+        Total committed heap usage (bytes)=1821749248
+    Shuffle Errors
+        BAD_ID=0
+        CONNECTION=0
+        IO_ERROR=0
+        WRONG_LENGTH=0
+        WRONG_MAP=0
+        WRONG_REDUCE=0
+    File Input Format Counters
+        Bytes Read=4454050
+    File Output Format Counters
+        Bytes Written=344
+2017-02-18 07:08:55,332 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 07:08:57,528 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 07:08:57,561 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 07:08:59,544 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
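The failure above is worth unpacking: all ten reducers have committed, and the job dies only in LocalJobRunner's final bookkeeping, the first time it touches org.apache.http.client.methods.HttpUriRequest. That class ships in Apache HttpComponents (httpclient), so the likely cause in a hand-assembled local run (e.g. under Eclipse) is that the httpclient and httpcore jars from the Hadoop client libraries are missing from the project's build path; adding them there, or to HADOOP_CLASSPATH, is the usual remedy. The recurring "No job jar file set" warning is a separate issue, fixed in the driver. A minimal driver sketch (Q1Driver is a hypothetical name, not the assignment's actual class):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class Q1Driver {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Q1");
            // Tells Hadoop which jar carries the user classes; silences the
            // JobResourceUploader "No job jar file set" warning.
            job.setJarByClass(Q1Driver.class);
            // Ten reduce tasks would match the r_000000..r_000009 attempts above.
            job.setNumReduceTasks(10);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }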
+2017-02-18 07:08:59,596 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 07:09:00,022 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 07:09:01,311 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local484764157_0001
+2017-02-18 07:09:03,289 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 07:09:03,291 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local484764157_0001
+2017-02-18 07:09:03,305 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 07:09:03,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:09:03,371 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 07:09:03,810 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 07:09:03,812 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000000_0
+2017-02-18 07:09:04,062 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:09:04,166 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:09:04,195 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 07:09:04,295 INFO org.apache.hadoop.mapreduce.Job: Job job_local484764157_0001 running in uber mode : false
+2017-02-18 07:09:04,297 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 07:09:04,954 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:09:04,982 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:09:05,021 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:09:05,036 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:09:10,135 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:09:10,313 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-18 07:09:13,151 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:09:13,318 INFO org.apache.hadoop.mapreduce.Job: map 8% reduce 0%
+2017-02-18 07:09:16,155 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:09:16,338 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
+2017-02-18 07:09:19,158 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:09:19,341 INFO org.apache.hadoop.mapreduce.Job: map 19% reduce 0%
+2017-02-18 07:09:20,631 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 07:09:20,634 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 07:09:22,161 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:22,348 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 07:09:25,165 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:28,169 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:31,170 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:34,171 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:37,172 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:40,174 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:43,175 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:46,176 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:09:53,796 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:09:53,857 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 07:09:53,860 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:09:53,863 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000000_0' done.
+2017-02-18 07:09:53,878 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000000_0
+2017-02-18 07:09:53,878 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000001_0
+2017-02-18 07:09:53,883 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:09:53,884 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:09:53,886 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 07:09:54,297 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:09:54,307 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:09:54,308 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:09:54,308 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:09:54,309 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:09:54,314 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:09:54,333 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:09:54,416 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:09:59,653 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 07:09:59,660 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 07:09:59,920 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:00,425 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 07:10:02,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:05,933 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:08,938 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:09,893 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:10:09,926 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 07:10:09,933 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:10:09,938 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000001_0' done.
+2017-02-18 07:10:09,942 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:09,943 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:09,948 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:09,949 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:09,971 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 07:10:10,352 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:10:10,354 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:10:10,358 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:10:10,359 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:10:10,360 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:10:10,365 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:10:10,374 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:10:10,453 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:10:14,114 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 07:10:14,133 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 07:10:14,460 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 07:10:15,991 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:16,468 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 07:10:19,001 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:21,961 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:10:22,003 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:10:22,016 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 07:10:22,028 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:10:22,036 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_m_000002_0' done.
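All three map tasks print the same sort-buffer geometry, and it follows from mapreduce.task.io.sort.mb: 100 alone: the collector allocates a 100 MB circular buffer, spills when it is 80% full (the default mapreduce.map.sort.spill.percent), and reserves a 16-byte metadata entry per record, which caps the buffer at 6553600 records. A worked check, assuming those defaults:

    public class SortBufferMath {
        public static void main(String[] args) {
            long bufvoid = 100L * 1024 * 1024;          // 104857600, the logged bufvoid
            long softLimit = (long) (bufvoid * 0.80f);  // 83886080, the logged soft limit
            long metaCapacity = bufvoid / 16;           // 6553600, the logged meta "length"
            System.out.println(bufvoid + " " + softLimit + " " + metaCapacity);
        }
    }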
+2017-02-18 07:10:22,037 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_m_000002_0 +2017-02-18 07:10:22,038 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. +2017-02-18 07:10:22,146 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 07:10:22,147 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000000_0 +2017-02-18 07:10:22,220 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:10:22,221 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:10:22,258 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@194e2c2f +2017-02-18 07:10:22,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:10:22,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:10:22,488 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 07:10:22,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 878989 len: 878993 to MEMORY +2017-02-18 07:10:22,821 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 878989 bytes from map-output for attempt_local484764157_0001_m_000002_0 +2017-02-18 07:10:22,851 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 878989, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->878989 +2017-02-18 07:10:22,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2462807 len: 2462811 to MEMORY +2017-02-18 07:10:22,956 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2462807 bytes from map-output for attempt_local484764157_0001_m_000000_0 +2017-02-18 07:10:22,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2462807, inMemoryMapOutputs.size() -> 2, commitMemory -> 878989, usedMemory ->3341796 +2017-02-18 07:10:22,981 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 864040 len: 864044 to MEMORY +2017-02-18 07:10:22,986 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 864040 bytes from map-output for attempt_local484764157_0001_m_000001_0 +2017-02-18 07:10:23,008 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 864040, inMemoryMapOutputs.size() -> 3, commitMemory -> 3341796, usedMemory ->4205836 +2017-02-18 07:10:23,009 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:10:23,010 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
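The fetch accounting for this reducer can be checked directly against the three map outputs it copies: commitMemory grows by each decompressed output as it is closed, and usedMemory ends at their sum,

    878989 + 2462807 = 3341796
    3341796 + 864040 = 4205836

which is exactly the 4205836 bytes the final merge writes to disk in the records that follow.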
+2017-02-18 07:10:23,010 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:10:23,066 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:10:23,068 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4205810 bytes +2017-02-18 07:10:25,129 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4205836 bytes to disk to satisfy reduce memory limit +2017-02-18 07:10:25,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4205836 bytes from disk +2017-02-18 07:10:25,142 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:10:25,144 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:10:25,145 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4205821 bytes +2017-02-18 07:10:25,150 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:25,239 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 07:10:27,819 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 07:10:27,845 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:27,846 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000000_0 is allowed to commit now +2017-02-18 07:10:27,847 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000000 +2017-02-18 07:10:27,860 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000000_0' done. 
+2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000000_0 +2017-02-18 07:10:27,861 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000001_0 +2017-02-18 07:10:27,879 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:10:27,880 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:10:27,881 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@731e0de +2017-02-18 07:10:27,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:10:27,917 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:10:27,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1021001 len: 1021005 to MEMORY +2017-02-18 07:10:27,961 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1021001 bytes from map-output for attempt_local484764157_0001_m_000002_0 +2017-02-18 07:10:27,979 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1021001, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1021001 +2017-02-18 07:10:27,990 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3748098 len: 3748102 to MEMORY +2017-02-18 07:10:28,110 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3748098 bytes from map-output for attempt_local484764157_0001_m_000000_0 +2017-02-18 07:10:28,117 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3748098, inMemoryMapOutputs.size() -> 2, commitMemory -> 1021001, usedMemory ->4769099 +2017-02-18 07:10:28,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 2021094 len: 2021098 to MEMORY +2017-02-18 07:10:28,163 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2021094 bytes from map-output for attempt_local484764157_0001_m_000001_0 +2017-02-18 07:10:28,177 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2021094, inMemoryMapOutputs.size() -> 3, commitMemory -> 4769099, usedMemory ->6790193 +2017-02-18 07:10:28,183 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:10:28,184 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:10:28,184 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:10:28,187 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:10:28,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 6790184 bytes +2017-02-18 07:10:28,504 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 10% +2017-02-18 07:10:30,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6790193 bytes to disk to satisfy reduce memory limit +2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 6790193 bytes from disk +2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:10:30,800 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:10:30,801 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6790186 bytes +2017-02-18 07:10:30,801 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:33,873 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000001_0 is done. And is in the process of committing +2017-02-18 07:10:33,884 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:33,890 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:33,895 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000001_0 is allowed to commit now +2017-02-18 07:10:33,902 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000001 +2017-02-18 07:10:33,905 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:33,912 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000001_0' done. 
+2017-02-18 07:10:33,913 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000001_0 +2017-02-18 07:10:33,914 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000002_0 +2017-02-18 07:10:33,931 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:10:33,932 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:10:33,933 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f08370b +2017-02-18 07:10:33,950 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:10:33,969 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:10:33,975 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1216330 len: 1216334 to MEMORY +2017-02-18 07:10:33,998 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1216330 bytes from map-output for attempt_local484764157_0001_m_000002_0 +2017-02-18 07:10:34,013 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1216330, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1216330 +2017-02-18 07:10:34,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 4957197 len: 4957201 to MEMORY +2017-02-18 07:10:34,165 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 4957197 bytes from map-output for attempt_local484764157_0001_m_000000_0 +2017-02-18 07:10:34,172 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 4957197, inMemoryMapOutputs.size() -> 2, commitMemory -> 1216330, usedMemory ->6173527 +2017-02-18 07:10:34,185 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1268271 len: 1268275 to MEMORY +2017-02-18 07:10:34,192 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1268271 bytes from map-output for attempt_local484764157_0001_m_000001_0 +2017-02-18 07:10:34,209 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1268271, inMemoryMapOutputs.size() -> 3, commitMemory -> 6173527, usedMemory ->7441798 +2017-02-18 07:10:34,211 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:10:34,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:10:34,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:10:34,217 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:10:34,217 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 7441769 bytes +2017-02-18 07:10:34,519 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 20% +2017-02-18 07:10:36,887 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 7441798 bytes to disk to satisfy reduce memory limit +2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 7441798 bytes from disk +2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:10:36,889 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:10:36,890 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 7441780 bytes +2017-02-18 07:10:36,897 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:39,695 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000002_0 is done. And is in the process of committing +2017-02-18 07:10:39,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:39,733 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000002_0 is allowed to commit now +2017-02-18 07:10:39,735 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000002 +2017-02-18 07:10:39,740 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:39,746 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000002_0' done. 
+2017-02-18 07:10:39,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000002_0 +2017-02-18 07:10:39,748 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000003_0 +2017-02-18 07:10:39,758 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:10:39,759 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:10:39,759 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7655ff39 +2017-02-18 07:10:39,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:10:39,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:10:39,804 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 686549 len: 686553 to MEMORY +2017-02-18 07:10:39,814 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 686549 bytes from map-output for attempt_local484764157_0001_m_000002_0 +2017-02-18 07:10:39,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 686549, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->686549 +2017-02-18 07:10:39,865 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2553004 len: 2553008 to MEMORY +2017-02-18 07:10:39,958 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2553004 bytes from map-output for attempt_local484764157_0001_m_000000_0 +2017-02-18 07:10:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2553004, inMemoryMapOutputs.size() -> 2, commitMemory -> 686549, usedMemory ->3239553 +2017-02-18 07:10:39,967 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 947339 len: 947343 to MEMORY +2017-02-18 07:10:39,987 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 947339 bytes from map-output for attempt_local484764157_0001_m_000001_0 +2017-02-18 07:10:40,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 947339, inMemoryMapOutputs.size() -> 3, commitMemory -> 3239553, usedMemory ->4186892 +2017-02-18 07:10:40,002 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:10:40,003 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:10:40,003 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:10:40,005 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:10:40,005 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4186872 bytes +2017-02-18 07:10:40,541 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30% +2017-02-18 07:10:41,489 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4186892 bytes to disk to satisfy reduce memory limit +2017-02-18 07:10:41,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4186892 bytes from disk +2017-02-18 07:10:41,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4186881 bytes +2017-02-18 07:10:41,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:42,943 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000003_0 is done. And is in the process of committing +2017-02-18 07:10:42,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:10:42,965 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000003_0 is allowed to commit now +2017-02-18 07:10:42,966 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000003 +2017-02-18 07:10:42,976 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:10:42,978 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000003_0' done. 
+2017-02-18 07:10:42,978 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000003_0
+2017-02-18 07:10:42,979 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000004_0
+2017-02-18 07:10:42,992 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:42,993 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:42,994 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@54babeeb
+2017-02-18 07:10:42,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:10:43,031 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:10:43,065 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 853557 len: 853561 to MEMORY
+2017-02-18 07:10:43,074 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 853557 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:43,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 853557, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->853557
+2017-02-18 07:10:43,108 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3061786 len: 3061790 to MEMORY
+2017-02-18 07:10:43,212 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3061786 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:10:43,212 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3061786, inMemoryMapOutputs.size() -> 2, commitMemory -> 853557, usedMemory ->3915343
+2017-02-18 07:10:43,220 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1002557 len: 1002561 to MEMORY
+2017-02-18 07:10:43,238 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1002557 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:43,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1002557, inMemoryMapOutputs.size() -> 3, commitMemory -> 3915343, usedMemory ->4917900
+2017-02-18 07:10:43,257 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:10:43,258 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:43,258 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:10:43,260 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:10:43,260 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4917879 bytes
+2017-02-18 07:10:43,583 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40%
+2017-02-18 07:10:45,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4917900 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4917900 bytes from disk
+2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:10:45,166 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:10:45,167 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4917892 bytes
+2017-02-18 07:10:45,167 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:46,884 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 07:10:46,899 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:46,903 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000004_0 is allowed to commit now
+2017-02-18 07:10:46,905 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000004
+2017-02-18 07:10:46,911 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:10:46,917 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000004_0' done.
+2017-02-18 07:10:46,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000004_0
+2017-02-18 07:10:46,919 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000005_0
+2017-02-18 07:10:46,930 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:46,931 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:46,931 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1808ffef
+2017-02-18 07:10:46,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:10:46,967 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:10:46,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 767280 len: 767284 to MEMORY
+2017-02-18 07:10:46,991 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 767280 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:47,015 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 767280, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->767280
+2017-02-18 07:10:47,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2671328 len: 2671332 to MEMORY
+2017-02-18 07:10:47,079 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2671328 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:10:47,110 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2671328, inMemoryMapOutputs.size() -> 2, commitMemory -> 767280, usedMemory ->3438608
+2017-02-18 07:10:47,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 894060 len: 894064 to MEMORY
+2017-02-18 07:10:47,132 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 894060 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:47,146 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 894060, inMemoryMapOutputs.size() -> 3, commitMemory -> 3438608, usedMemory ->4332668
+2017-02-18 07:10:47,148 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:10:47,149 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:47,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:10:47,150 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:10:47,151 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4332653 bytes
+2017-02-18 07:10:47,595 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 50%
+2017-02-18 07:10:48,614 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4332668 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:10:48,616 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4332668 bytes from disk
+2017-02-18 07:10:48,618 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:10:48,618 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:10:48,619 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4332659 bytes
+2017-02-18 07:10:48,619 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:50,071 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 07:10:50,076 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:50,076 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000005_0 is allowed to commit now
+2017-02-18 07:10:50,077 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000005
+2017-02-18 07:10:50,088 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:10:50,090 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000005_0' done.
+2017-02-18 07:10:50,091 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000005_0
+2017-02-18 07:10:50,092 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000006_0
+2017-02-18 07:10:50,100 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:50,101 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:50,110 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@28ae5b0b
+2017-02-18 07:10:50,116 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:10:50,138 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:10:50,153 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 967027 len: 967031 to MEMORY
+2017-02-18 07:10:50,160 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 967027 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:50,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 967027, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->967027
+2017-02-18 07:10:50,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 3388337 len: 3388341 to MEMORY
+2017-02-18 07:10:50,286 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 3388337 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:10:50,291 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 3388337, inMemoryMapOutputs.size() -> 2, commitMemory -> 967027, usedMemory ->4355364
+2017-02-18 07:10:50,305 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1152263 len: 1152267 to MEMORY
+2017-02-18 07:10:50,321 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1152263 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:50,330 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1152263, inMemoryMapOutputs.size() -> 3, commitMemory -> 4355364, usedMemory ->5507627
+2017-02-18 07:10:50,332 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:10:50,334 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:50,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:10:50,336 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:10:50,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 5507601 bytes
+2017-02-18 07:10:50,602 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60%
+2017-02-18 07:10:52,360 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 5507627 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:10:52,362 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 5507627 bytes from disk
+2017-02-18 07:10:52,362 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:10:52,368 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:10:52,369 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 5507615 bytes
+2017-02-18 07:10:52,371 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:54,311 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 07:10:54,331 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:54,336 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000006_0 is allowed to commit now
+2017-02-18 07:10:54,338 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000006
+2017-02-18 07:10:54,343 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:10:54,347 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000006_0' done.
+2017-02-18 07:10:54,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000006_0
+2017-02-18 07:10:54,349 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000007_0
+2017-02-18 07:10:54,357 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:54,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:54,362 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68792330
+2017-02-18 07:10:54,374 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:10:54,401 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:10:54,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 710901 len: 710905 to MEMORY
+2017-02-18 07:10:54,438 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 710901 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:54,438 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 710901, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->710901
+2017-02-18 07:10:54,457 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 2633768 len: 2633772 to MEMORY
+2017-02-18 07:10:54,521 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2633768 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:10:54,551 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2633768, inMemoryMapOutputs.size() -> 2, commitMemory -> 710901, usedMemory ->3344669
+2017-02-18 07:10:54,554 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 824970 len: 824974 to MEMORY
+2017-02-18 07:10:54,574 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 824970 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:54,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 824970, inMemoryMapOutputs.size() -> 3, commitMemory -> 3344669, usedMemory ->4169639
+2017-02-18 07:10:54,586 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:10:54,587 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:54,587 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:10:54,589 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:10:54,589 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 4169616 bytes
+2017-02-18 07:10:54,634 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 70%
+2017-02-18 07:10:55,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 4169639 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:10:55,947 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 4169639 bytes from disk
+2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 4169630 bytes
+2017-02-18 07:10:55,948 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:57,339 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 07:10:57,350 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:57,358 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000007_0 is allowed to commit now
+2017-02-18 07:10:57,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000007
+2017-02-18 07:10:57,365 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:10:57,367 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000007_0' done.
+2017-02-18 07:10:57,368 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000007_0
+2017-02-18 07:10:57,374 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000008_0
+2017-02-18 07:10:57,379 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:10:57,380 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:10:57,381 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@32609547
+2017-02-18 07:10:57,393 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:10:57,404 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:10:57,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 1462439 len: 1462443 to MEMORY
+2017-02-18 07:10:57,436 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1462439 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:10:57,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1462439, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->1462439
+2017-02-18 07:10:57,479 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 5237666 len: 5237670 to MEMORY
+2017-02-18 07:10:57,645 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:10:57,648 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 5237666 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:10:57,648 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 5237666, inMemoryMapOutputs.size() -> 2, commitMemory -> 1462439, usedMemory ->6700105
+2017-02-18 07:10:57,651 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 1510887 len: 1510891 to MEMORY
+2017-02-18 07:10:57,665 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1510887 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:10:57,687 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1510887, inMemoryMapOutputs.size() -> 3, commitMemory -> 6700105, usedMemory ->8210992
+2017-02-18 07:10:57,694 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:10:57,695 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:10:57,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:10:57,703 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:10:57,704 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 8210965 bytes
+2017-02-18 07:10:58,647 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80%
+2017-02-18 07:11:00,751 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 8210992 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 8210992 bytes from disk
+2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:11:00,754 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 8210983 bytes
+2017-02-18 07:11:00,755 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:11:03,386 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:11:03,659 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 90%
+2017-02-18 07:11:03,801 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 07:11:03,811 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:11:03,817 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000008_0 is allowed to commit now
+2017-02-18 07:11:03,820 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000008
+2017-02-18 07:11:03,830 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:11:03,833 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000008_0' done.
+2017-02-18 07:11:03,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000008_0
+2017-02-18 07:11:03,839 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local484764157_0001_r_000009_0
+2017-02-18 07:11:03,848 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:11:03,862 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:11:03,875 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@9b82279
+2017-02-18 07:11:03,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:11:03,896 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local484764157_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:11:03,914 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000002_0 decomp: 563620 len: 563624 to MEMORY
+2017-02-18 07:11:03,929 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 563620 bytes from map-output for attempt_local484764157_0001_m_000002_0
+2017-02-18 07:11:03,946 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 563620, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->563620
+2017-02-18 07:11:03,969 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000000_0 decomp: 1935947 len: 1935951 to MEMORY
+2017-02-18 07:11:04,044 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 1935947 bytes from map-output for attempt_local484764157_0001_m_000000_0
+2017-02-18 07:11:04,047 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 1935947, inMemoryMapOutputs.size() -> 2, commitMemory -> 563620, usedMemory ->2499567
+2017-02-18 07:11:04,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local484764157_0001_m_000001_0 decomp: 733075 len: 733079 to MEMORY
+2017-02-18 07:11:04,076 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 733075 bytes from map-output for attempt_local484764157_0001_m_000001_0
+2017-02-18 07:11:04,089 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 733075, inMemoryMapOutputs.size() -> 3, commitMemory -> 2499567, usedMemory ->3232642
+2017-02-18 07:11:04,092 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:11:04,093 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:11:04,093 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:11:04,095 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:11:04,095 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 3232620 bytes
+2017-02-18 07:11:05,090 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 3232642 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 3232642 bytes from disk
+2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:11:05,092 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:11:05,093 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 3232631 bytes
+2017-02-18 07:11:05,093 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:11:06,076 INFO org.apache.hadoop.mapred.Task: Task:attempt_local484764157_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 07:11:06,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:11:06,117 INFO org.apache.hadoop.mapred.Task: Task attempt_local484764157_0001_r_000009_0 is allowed to commit now
+2017-02-18 07:11:06,118 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local484764157_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.i/_temporary/0/task_local484764157_0001_r_000009
+2017-02-18 07:11:06,128 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:11:06,131 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local484764157_0001_r_000009_0' done.
+2017-02-18 07:11:06,132 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local484764157_0001_r_000009_0
+2017-02-18 07:11:06,133 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 07:11:06,356 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local484764157_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 07:11:06,670 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:11:06,670 INFO org.apache.hadoop.mapreduce.Job: Job job_local484764157_0001 failed with state FAILED due to: NA
+2017-02-18 07:11:06,936 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=919728132
+		FILE: Number of bytes written=960458626
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=52996307
+		Input split bytes=351
+		Combine input records=0
+		Combine output records=0
+		Reduce input groups=195336
+		Reduce shuffle bytes=52996307
+		Reduce input records=4678719
+		Reduce output records=119
+		Spilled Records=9357438
+		Shuffled Maps =30
+		Failed Shuffles=0
+		Merged Map outputs=30
+		GC time elapsed (ms)=871
+		Total committed heap usage (bytes)=2551959552
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=1297
+2017-02-18 07:35:20,526 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 07:35:22,634 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 07:35:22,642 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 07:35:24,655 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 07:35:24,723 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 07:35:25,182 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 07:35:26,701 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local378226183_0001
+2017-02-18 07:35:28,571 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 07:35:28,574 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local378226183_0001
+2017-02-18 07:35:28,595 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 07:35:28,670 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:35:28,679 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 07:35:29,112 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 07:35:29,114 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000000_0
+2017-02-18 07:35:29,370 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:35:29,500 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:35:29,531 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 07:35:29,577 INFO org.apache.hadoop.mapreduce.Job: Job job_local378226183_0001 running in uber mode : false
+2017-02-18 07:35:29,611 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 07:35:30,918 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:35:30,919 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:35:31,506 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:35:31,939 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:35:35,765 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:35:36,663 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0%
+2017-02-18 07:35:38,773 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:35:39,670 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0%
+2017-02-18 07:35:41,776 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:35:42,686 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0%
+2017-02-18 07:35:44,777 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:35:45,696 INFO org.apache.hadoop.mapreduce.Job: map 16% reduce 0%
+2017-02-18 07:35:47,695 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:35:47,699 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:35:47,705 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:35:47,707 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 07:35:47,707 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 07:35:47,779 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:35:48,732 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 07:35:50,780 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:35:53,782 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:35:56,784 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:35:59,785 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:02,787 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:05,789 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:08,790 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:11,792 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:15,221 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:36:15,301 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 07:36:15,311 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:36:15,316 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000000_0' done.
+2017-02-18 07:36:15,318 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:15,319 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:15,328 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:15,329 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:15,356 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 07:36:15,807 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:36:15,812 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:36:15,815 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:36:15,815 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:36:15,816 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:36:15,816 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:36:15,829 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:36:15,848 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:36:21,514 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:36:21,613 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:36:21,614 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 07:36:21,614 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 07:36:21,838 INFO org.apache.hadoop.mapreduce.Job: map 55% reduce 0%
+2017-02-18 07:36:24,614 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:24,850 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 07:36:27,615 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:29,379 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:36:29,393 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 07:36:29,402 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:36:29,408 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000001_0' done.
+2017-02-18 07:36:29,409 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:29,411 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:29,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:29,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:29,442 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 07:36:29,839 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 07:36:29,853 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 07:36:29,853 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 07:36:29,854 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 07:36:29,854 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 07:36:29,859 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 07:36:29,862 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:36:29,871 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 07:36:32,938 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 07:36:32,946 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 07:36:32,947 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 07:36:32,948 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 07:36:32,948 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 07:36:33,932 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 07:36:35,448 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:35,934 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 07:36:38,458 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 07:36:38,834 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 07:36:38,877 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 07:36:38,883 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 07:36:38,889 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_m_000002_0' done.
+2017-02-18 07:36:38,890 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:38,897 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 07:36:38,945 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 07:36:39,045 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 07:36:39,046 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000000_0
+2017-02-18 07:36:39,127 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:39,128 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:39,171 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@712678ab
+2017-02-18 07:36:39,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:36:39,396 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:36:39,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 07:36:39,804 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:39,822 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61
+2017-02-18 07:36:39,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 30 len: 34 to MEMORY
+2017-02-18 07:36:39,858 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:39,860 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->91
+2017-02-18 07:36:39,868 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
+2017-02-18 07:36:39,875 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:39,879 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 91, usedMemory ->122
+2017-02-18 07:36:39,882 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:36:39,883 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:39,883 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:36:39,948 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:36:39,950 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes
+2017-02-18 07:36:39,958 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:36:39,960 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 122 bytes from disk
+2017-02-18 07:36:39,984 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:36:39,984 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:36:39,985 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes
+2017-02-18 07:36:39,992 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,087 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 07:36:40,097 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 07:36:40,107 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,115 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000000_0 is allowed to commit now
+2017-02-18 07:36:40,117 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000000
+2017-02-18 07:36:40,127 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:36:40,129 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000000_0' done.
+2017-02-18 07:36:40,130 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000000_0
+2017-02-18 07:36:40,131 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000001_0
+2017-02-18 07:36:40,142 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:40,143 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:40,143 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1b362c68
+2017-02-18 07:36:40,157 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:36:40,174 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:36:40,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 07:36:40,194 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:40,198 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154
+2017-02-18 07:36:40,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 69 len: 73 to MEMORY
+2017-02-18 07:36:40,227 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:40,233 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->223
+2017-02-18 07:36:40,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
+2017-02-18 07:36:40,250 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:40,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 3, commitMemory -> 223, usedMemory ->262
+2017-02-18 07:36:40,251 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:36:40,260 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,261 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:36:40,262 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:36:40,262 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes
+2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 262 bytes from disk
+2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:36:40,264 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:36:40,265 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes
+2017-02-18 07:36:40,272 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,327 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 07:36:40,330 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,333 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000001_0 is allowed to commit now
+2017-02-18 07:36:40,344 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000001
+2017-02-18 07:36:40,355 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:36:40,356 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000001_0' done.
+2017-02-18 07:36:40,361 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000001_0
+2017-02-18 07:36:40,362 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000002_0
+2017-02-18 07:36:40,374 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:40,375 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:40,375 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@20bae047
+2017-02-18 07:36:40,383 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:36:40,410 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:36:40,429 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 07:36:40,430 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:40,435 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112
+2017-02-18 07:36:40,439 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 41 len: 45 to MEMORY
+2017-02-18 07:36:40,450 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:40,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->153
+2017-02-18 07:36:40,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
+2017-02-18 07:36:40,476 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:40,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 3, commitMemory -> 153, usedMemory ->204
+2017-02-18 07:36:40,482 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:36:40,483 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:36:40,485 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:36:40,485 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes
+2017-02-18 07:36:40,489 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:36:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 204 bytes from disk
+2017-02-18 07:36:40,497 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:36:40,498 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:36:40,499 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes
+2017-02-18 07:36:40,504 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,560 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 07:36:40,562 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,562 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000002_0 is allowed to commit now
+2017-02-18 07:36:40,563 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000002
+2017-02-18 07:36:40,578 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000002_0' done.
+2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000002_0
+2017-02-18 07:36:40,579 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000003_0
+2017-02-18 07:36:40,600 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:40,601 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:40,601 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4e12001b
+2017-02-18 07:36:40,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:36:40,634 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:36:40,643 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 07:36:40,655 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:40,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63
+2017-02-18 07:36:40,674 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 07:36:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:40,682 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74
+2017-02-18 07:36:40,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 07:36:40,707 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:40,707 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85
+2017-02-18 07:36:40,708 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:36:40,708 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,709 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:36:40,712 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:36:40,712 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
+2017-02-18 07:36:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:36:40,713 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
+2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:36:40,722 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes
+2017-02-18 07:36:40,723 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 07:36:40,778 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000003_0 is allowed to commit now
+2017-02-18 07:36:40,779 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000003
+2017-02-18 07:36:40,798 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000003_0' done.
+2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000003_0
+2017-02-18 07:36:40,799 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000004_0
+2017-02-18 07:36:40,825 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 07:36:40,826 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 07:36:40,826 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@899cd30
+2017-02-18 07:36:40,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 07:36:40,859 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 07:36:40,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 07:36:40,880 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local378226183_0001_m_000000_0
+2017-02-18 07:36:40,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52
+2017-02-18 07:36:40,889 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 33 len: 37 to MEMORY
+2017-02-18 07:36:40,898 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local378226183_0001_m_000001_0
+2017-02-18 07:36:40,915 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->85
+2017-02-18 07:36:40,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
+2017-02-18 07:36:40,928 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local378226183_0001_m_000002_0
+2017-02-18 07:36:40,939 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 85, usedMemory ->116
+2017-02-18 07:36:40,940 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 07:36:40,941 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:40,941 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:36:40,943 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:36:40,944 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes +2017-02-18 07:36:40,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit +2017-02-18 07:36:40,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk +2017-02-18 07:36:40,953 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40% +2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:36:40,962 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes +2017-02-18 07:36:40,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,049 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000004_0 is done. And is in the process of committing +2017-02-18 07:36:41,051 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,051 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000004_0 is allowed to commit now +2017-02-18 07:36:41,052 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000004 +2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000004_0' done. 
+2017-02-18 07:36:41,053 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000004_0 +2017-02-18 07:36:41,054 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000005_0 +2017-02-18 07:36:41,077 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:36:41,078 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:36:41,087 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@76dab03c +2017-02-18 07:36:41,098 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:36:41,164 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:36:41,185 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 07:36:41,189 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local378226183_0001_m_000000_0 +2017-02-18 07:36:41,192 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54 +2017-02-18 07:36:41,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 23 len: 27 to MEMORY +2017-02-18 07:36:41,207 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local378226183_0001_m_000001_0 +2017-02-18 07:36:41,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77 +2017-02-18 07:36:41,223 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 07:36:41,236 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local378226183_0001_m_000002_0 +2017-02-18 07:36:41,239 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100 +2017-02-18 07:36:41,241 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:36:41,242 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:36:41,243 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:36:41,245 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:36:41,245 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes +2017-02-18 07:36:41,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit +2017-02-18 07:36:41,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 100 bytes from disk +2017-02-18 07:36:41,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:36:41,253 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:36:41,254 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 07:36:41,259 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,313 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000005_0 is done. And is in the process of committing +2017-02-18 07:36:41,315 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,316 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000005_0 is allowed to commit now +2017-02-18 07:36:41,317 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000005 +2017-02-18 07:36:41,324 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:36:41,331 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000005_0' done. 
+2017-02-18 07:36:41,333 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000005_0 +2017-02-18 07:36:41,333 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000006_0 +2017-02-18 07:36:41,357 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:36:41,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:36:41,359 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@14bddbc2 +2017-02-18 07:36:41,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:36:41,395 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:36:41,408 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 07:36:41,409 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local378226183_0001_m_000000_0 +2017-02-18 07:36:41,415 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 +2017-02-18 07:36:41,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 28 len: 32 to MEMORY +2017-02-18 07:36:41,429 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local378226183_0001_m_000001_0 +2017-02-18 07:36:41,432 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 +2017-02-18 07:36:41,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 28 len: 32 to MEMORY +2017-02-18 07:36:41,460 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local378226183_0001_m_000002_0 +2017-02-18 07:36:41,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 +2017-02-18 07:36:41,461 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:36:41,462 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:36:41,462 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:36:41,463 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:36:41,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes +2017-02-18 07:36:41,469 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit +2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk +2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:36:41,470 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:36:41,471 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes +2017-02-18 07:36:41,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,525 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000006_0 is done. And is in the process of committing +2017-02-18 07:36:41,529 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,542 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000006_0 is allowed to commit now +2017-02-18 07:36:41,544 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000006 +2017-02-18 07:36:41,548 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:36:41,550 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000006_0' done. 
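Ten reduce attempts (r_000000 through r_000009) run back to back in this job, and each one copies exactly three map outputs ("3 / 3 copied."), one per input file, because every map task partitions its output ten ways. Assuming the job relies on Hadoop's default HashPartitioner (the log does not show the partitioner class), the routing rule is the sketch below; the class name is hypothetical.

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Partitioner;

    // Sketch only: how a key reaches one of the ten reduce partitions seen in
    // this log, using the same formula as Hadoop's default HashPartitioner.
    public class TenWayPartitioner extends Partitioner<Text, IntWritable> {
      @Override
      public int getPartition(Text key, IntWritable value, int numReduceTasks) {
        // Mask the sign bit, then take the remainder modulo the reduce count
        // (10 here), so every key lands deterministically in one partition.
        return (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks;
      }
    }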
+2017-02-18 07:36:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000006_0 +2017-02-18 07:36:41,551 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000007_0 +2017-02-18 07:36:41,560 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:36:41,561 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:36:41,566 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@709a7571 +2017-02-18 07:36:41,587 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:36:41,604 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:36:41,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 07:36:41,624 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local378226183_0001_m_000000_0 +2017-02-18 07:36:41,627 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 +2017-02-18 07:36:41,640 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 11 len: 15 to MEMORY +2017-02-18 07:36:41,651 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local378226183_0001_m_000001_0 +2017-02-18 07:36:41,654 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->99 +2017-02-18 07:36:41,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 07:36:41,671 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local378226183_0001_m_000002_0 +2017-02-18 07:36:41,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 99, usedMemory ->101 +2017-02-18 07:36:41,695 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:36:41,696 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:36:41,696 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:36:41,699 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:36:41,699 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes +2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit +2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk +2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:36:41,700 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:36:41,701 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 07:36:41,701 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,761 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000007_0 is done. And is in the process of committing +2017-02-18 07:36:41,763 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,763 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000007_0 is allowed to commit now +2017-02-18 07:36:41,764 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000007 +2017-02-18 07:36:41,769 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:36:41,774 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000007_0' done. 
+2017-02-18 07:36:41,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000007_0 +2017-02-18 07:36:41,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000008_0 +2017-02-18 07:36:41,790 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:36:41,791 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:36:41,791 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6944e8c1 +2017-02-18 07:36:41,799 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:36:41,826 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:36:41,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 07:36:41,852 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local378226183_0001_m_000000_0 +2017-02-18 07:36:41,852 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 +2017-02-18 07:36:41,861 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 49 len: 53 to MEMORY +2017-02-18 07:36:41,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local378226183_0001_m_000001_0 +2017-02-18 07:36:41,873 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 +2017-02-18 07:36:41,886 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 07:36:41,890 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local378226183_0001_m_000002_0 +2017-02-18 07:36:41,892 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 +2017-02-18 07:36:41,903 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:36:41,904 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:36:41,904 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 07:36:41,905 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 07:36:41,905 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes +2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit +2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk +2017-02-18 07:36:41,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 07:36:41,918 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 07:36:41,918 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes +2017-02-18 07:36:41,934 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,962 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 80% +2017-02-18 07:36:41,982 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000008_0 is done. And is in the process of committing +2017-02-18 07:36:41,984 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 07:36:41,984 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000008_0 is allowed to commit now +2017-02-18 07:36:41,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000008 +2017-02-18 07:36:41,995 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 07:36:41,999 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000008_0' done. 
+2017-02-18 07:36:42,002 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000008_0 +2017-02-18 07:36:42,004 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local378226183_0001_r_000009_0 +2017-02-18 07:36:42,011 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 07:36:42,034 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 07:36:42,035 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1419ce96 +2017-02-18 07:36:42,051 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 07:36:42,081 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local378226183_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 07:36:42,104 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 07:36:42,118 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local378226183_0001_m_000000_0 +2017-02-18 07:36:42,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 07:36:42,133 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000001_0 decomp: 13 len: 17 to MEMORY +2017-02-18 07:36:42,134 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local378226183_0001_m_000001_0 +2017-02-18 07:36:42,144 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->35 +2017-02-18 07:36:42,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local378226183_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 07:36:42,164 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local378226183_0001_m_000002_0 +2017-02-18 07:36:42,165 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37 +2017-02-18 07:36:42,165 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 07:36:42,166 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 07:36:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 07:36:42,168 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 07:36:42,179 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes
+2017-02-18 07:36:42,180 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
+2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 37 bytes from disk
+2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 07:36:42,181 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes
+2017-02-18 07:36:42,189 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:42,235 INFO org.apache.hadoop.mapred.Task: Task:attempt_local378226183_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 07:36:42,237 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 07:36:42,237 INFO org.apache.hadoop.mapred.Task: Task attempt_local378226183_0001_r_000009_0 is allowed to commit now
+2017-02-18 07:36:42,238 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local378226183_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q2.i/_temporary/0/task_local378226183_0001_r_000009
+2017-02-18 07:36:42,243 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 07:36:42,245 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local378226183_0001_r_000009_0' done.
+2017-02-18 07:36:42,245 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local378226183_0001_r_000009_0
+2017-02-18 07:36:42,246 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 07:36:42,466 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local378226183_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+    at java.security.AccessController.doPrivileged(Native Method)
+    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+    ... 1 more
+2017-02-18 07:36:42,968 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 07:36:42,968 INFO org.apache.hadoop.mapreduce.Job: Job job_local378226183_0001 failed with state FAILED due to: NA
+2017-02-18 07:36:43,236 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+    File System Counters
+        FILE: Number of bytes read=324370280
+        FILE: Number of bytes written=3632281
+        FILE: Number of read operations=0
+        FILE: Number of large read operations=0
+        FILE: Number of write operations=0
+    Map-Reduce Framework
+        Map input records=507535
+        Map output records=4678719
+        Map output bytes=43638689
+        Map output materialized bytes=1471
+        Input split bytes=351
+        Combine input records=4678719
+        Combine output records=131
+        Reduce input groups=77
+        Reduce shuffle bytes=1471
+        Reduce input records=131
+        Reduce output records=77
+        Spilled Records=262
+        Shuffled Maps =30
+        Failed Shuffles=0
+        Merged Map outputs=30
+        GC time elapsed (ms)=1013
+        Total committed heap usage (bytes)=2551959552
+    Shuffle Errors
+        BAD_ID=0
+        CONNECTION=0
+        IO_ERROR=0
+        WRONG_LENGTH=0
+        WRONG_MAP=0
+        WRONG_REDUCE=0
+    File Input Format Counters
+        Bytes Read=26057874
+    File Output Format Counters
+        Bytes Written=862
+2017-02-18 08:10:46,450 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 08:10:48,908 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 08:10:48,937 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 08:10:51,128 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
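The failed run above ends with two actionable signals. The NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest means the Apache HttpComponents httpclient jar was absent from the LocalJobRunner classpath when the job cleaned up; the usual remedy is a classpath fix (for example, pointing HADOOP_CLASSPATH at the httpclient and httpcore jars), not a code change. The "No job jar file set" warning, repeated at the start of the next run, means the driver never identified its jar, so task JVMs may not find user classes. Below is a minimal driver sketch addressing that warning; it is a plausible word-count-style reconstruction under assumptions, not code recovered from this repository, and the StubDriver/StubMapper/StubReducer names are illustrative.

    import java.io.IOException;
    import java.util.StringTokenizer;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class StubDriver {
      // Placeholder mapper: emits (token, 1) per whitespace-separated token.
      public static class StubMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();
        @Override
        protected void map(LongWritable key, Text value, Context ctx)
            throws IOException, InterruptedException {
          StringTokenizer it = new StringTokenizer(value.toString());
          while (it.hasMoreTokens()) {
            word.set(it.nextToken());
            ctx.write(word, ONE);
          }
        }
      }

      // Placeholder reducer, also usable as the combiner: the counters above
      // show a combiner collapsing 4,678,719 records to 131 before the shuffle.
      public static class StubReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context ctx)
            throws IOException, InterruptedException {
          int sum = 0;
          for (IntWritable v : values) sum += v.get();
          ctx.write(key, new IntWritable(sum));
        }
      }

      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "Assign1 Q2");
        job.setJarByClass(StubDriver.class);     // addresses "No job jar file set"
        job.setMapperClass(StubMapper.class);
        job.setCombinerClass(StubReducer.class);
        job.setReducerClass(StubReducer.class);
        job.setNumReduceTasks(10);               // matches attempts r_000000..r_000009
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        for (int i = 0; i < args.length - 1; i++)
          FileInputFormat.addInputPath(job, new Path(args[i]));   // e.g. the three pg*.txt inputs
        FileOutputFormat.setOutputPath(job, new Path(args[args.length - 1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }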
+2017-02-18 08:10:51,224 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 08:10:51,664 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 08:10:53,086 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2083140530_0001 +2017-02-18 08:10:54,977 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 08:10:54,988 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2083140530_0001 +2017-02-18 08:10:54,999 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 08:10:55,060 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:10:55,072 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 08:10:55,531 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 08:10:55,533 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:10:55,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:10:55,921 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:10:55,926 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 08:10:55,999 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 running in uber mode : false +2017-02-18 08:10:56,004 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:10:57,831 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:10:57,832 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:10:57,929 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:10:58,049 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:01,985 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:02,018 INFO org.apache.hadoop.mapreduce.Job: map 1% reduce 0% +2017-02-18 08:11:04,989 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:05,027 INFO org.apache.hadoop.mapreduce.Job: map 5% reduce 0% +2017-02-18 08:11:07,994 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:08,039 INFO org.apache.hadoop.mapreduce.Job: map 11% reduce 0% +2017-02-18 08:11:10,996 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:11,048 INFO org.apache.hadoop.mapreduce.Job: map 16% reduce 0% +2017-02-18 08:11:14,000 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:14,037 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 08:11:14,044 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:14,045 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 
26925530; bufvoid = 104857600 +2017-02-18 08:11:14,046 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 08:11:14,052 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 08:11:17,004 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:20,010 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:23,012 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:26,014 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:29,016 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:32,024 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:35,034 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:38,040 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:40,745 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:11:40,810 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 08:11:40,824 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:11:40,827 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000000_0' done. +2017-02-18 08:11:40,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:11:40,834 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:11:40,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:11:40,841 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:11:40,861 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 08:11:41,154 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:11:41,280 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:11:41,288 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:11:41,289 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:11:41,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:11:41,298 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:11:41,312 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:45,711 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:11:45,730 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:45,731 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:45,732 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 08:11:45,733 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 08:11:46,174 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 08:11:46,865 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:47,176 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% 
+2017-02-18 08:11:49,867 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:52,868 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:11:53,319 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:11:53,334 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000001_0 is done. And is in the process of committing +2017-02-18 08:11:53,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:11:53,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000001_0' done. +2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:11:53,348 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:11:53,363 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:11:53,364 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:11:53,381 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 08:11:53,771 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 08:11:53,780 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 08:11:53,781 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 08:11:53,782 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 08:11:53,795 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 08:11:53,802 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 08:11:54,199 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:11:56,809 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 08:11:56,821 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 08:11:56,822 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 08:11:56,823 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 08:11:57,211 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 08:11:59,395 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:12:00,220 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 08:12:02,396 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 08:12:02,603 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 08:12:02,632 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2083140530_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2083140530_0001_m_000002_0' done. +2017-02-18 08:12:02,651 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:02,654 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
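Every map task in this run reports the same sort-buffer geometry: mapreduce.task.io.sort.mb: 100 yields the 104857600-byte buffer (bufvoid), and the reported soft limit of 83886080 bytes is 80% of that buffer, the occupancy at which spilling begins. The sketch below merely restates those values with standard Hadoop 2.x property keys; that these keys were set explicitly is an assumption, since the job's configuration is not in the log.

    import org.apache.hadoop.conf.Configuration;

    public class SortBufferConfig {
      // Returns a Configuration matching the map-side sort settings this log
      // reports: a 100 MB in-memory sort buffer with the spill threshold at
      // 80% of it (0.8 * 104857600 bytes = 83886080, the "soft limit" above).
      public static Configuration withLoggedSortSettings() {
        Configuration conf = new Configuration();
        conf.setInt("mapreduce.task.io.sort.mb", 100);
        conf.setFloat("mapreduce.map.sort.spill.percent", 0.80f);
        return conf;
      }
    }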
+2017-02-18 08:12:02,808 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 08:12:02,815 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000000_0 +2017-02-18 08:12:02,925 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:02,926 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:02,964 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@79758369 +2017-02-18 08:12:03,119 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:03,152 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:03,245 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 08:12:03,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:12:03,505 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:03,516 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61 +2017-02-18 08:12:03,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 08:12:03,556 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:03,557 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->92 +2017-02-18 08:12:03,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 30 len: 34 to MEMORY +2017-02-18 08:12:03,575 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:03,578 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 3, commitMemory -> 92, usedMemory ->122 +2017-02-18 08:12:03,580 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 08:12:03,581 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:03,649 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:03,650 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes +2017-02-18 08:12:03,656 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:03,661 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 122 bytes from disk +2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:03,685 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:03,686 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes +2017-02-18 08:12:03,688 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:03,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000001_0 +2017-02-18 08:12:03,810 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:03,820 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5b4e1b74 +2017-02-18 08:12:03,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:03,849 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:03,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:12:03,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:03,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154 +2017-02-18 08:12:03,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 08:12:03,880 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:03,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->193 +2017-02-18 08:12:03,889 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 69 len: 73 to MEMORY +2017-02-18 08:12:03,903 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2083140530_0001_m_000001_0 
+2017-02-18 08:12:03,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 3, commitMemory -> 193, usedMemory ->262 +2017-02-18 08:12:03,911 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:03,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:03,916 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes +2017-02-18 08:12:03,917 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:03,918 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 262 bytes from disk +2017-02-18 08:12:03,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:03,927 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:03,928 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes +2017-02-18 08:12:03,930 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:03,986 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000002_0 +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:12:03,989 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a6a1f5b +2017-02-18 08:12:03,994 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:12:04,019 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:12:04,034 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:12:04,036 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2083140530_0001_m_000000_0 +2017-02-18 08:12:04,040 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112 +2017-02-18 08:12:04,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 08:12:04,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2083140530_0001_m_000002_0 +2017-02-18 08:12:04,078 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, 
inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->163 +2017-02-18 08:12:04,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 41 len: 45 to MEMORY +2017-02-18 08:12:04,087 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2083140530_0001_m_000001_0 +2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 3, commitMemory -> 163, usedMemory ->204 +2017-02-18 08:12:04,088 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 08:12:04,089 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 08:12:04,091 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes +2017-02-18 08:12:04,107 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 204 bytes from disk +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 08:12:04,108 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes +2017-02-18 08:12:04,109 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
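The recurring MergerManager line encodes the reduce-side shuffle budget: maxSingleShuffleLimit is one quarter of memoryLimit (679778688 * 0.25 = 169944672), mergeThreshold is about 0.66 of it (448653952), and ioSortFactor=10 caps how many segments merge per pass. The sketch below names the standard property keys these figures appear to derive from; the percentages shown are the usual defaults, an assumption rather than values read from this job's config.

    import org.apache.hadoop.conf.Configuration;

    public class ShuffleBudgetConfig {
      // Sketch: property keys behind the MergerManager numbers in this log.
      // memoryLimit is the reducer heap times the input-buffer percent;
      // maxSingleShuffleLimit = 0.25 * memoryLimit; mergeThreshold = merge
      // percent * memoryLimit; ioSortFactor bounds segments merged per pass.
      public static Configuration withLoggedShuffleDefaults() {
        Configuration conf = new Configuration();
        conf.setFloat("mapreduce.reduce.shuffle.input.buffer.percent", 0.70f);
        conf.setFloat("mapreduce.reduce.shuffle.merge.percent", 0.66f);
        conf.setInt("mapreduce.task.io.sort.factor", 10);
        return conf;
      }
    }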
+2017-02-18 08:12:04,151 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000003_0
+2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:04,168 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:04,169 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7934bf83
+2017-02-18 08:12:04,175 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:04,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:04,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 08:12:04,215 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:04,225 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63
+2017-02-18 08:12:04,227 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 08:12:04,232 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:04,241 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74
+2017-02-18 08:12:04,246 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 08:12:04,256 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:04,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85
+2017-02-18 08:12:04,261 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:04,269 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes
+2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:04,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
+2017-02-18 08:12:04,281 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:04,283 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:04,285 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes
+2017-02-18 08:12:04,286 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,335 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000004_0
+2017-02-18 08:12:04,341 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:04,342 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4c122623
+2017-02-18 08:12:04,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:04,370 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:04,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:04,392 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52
+2017-02-18 08:12:04,394 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
+2017-02-18 08:12:04,407 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:04,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->83
+2017-02-18 08:12:04,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 33 len: 37 to MEMORY
+2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:04,435 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 3, commitMemory -> 83, usedMemory ->116
+2017-02-18 08:12:04,436 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,437 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:04,438 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes
+2017-02-18 08:12:04,445 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk
+2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:04,446 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes
+2017-02-18 08:12:04,454 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,498 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000005_0
+2017-02-18 08:12:04,506 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:04,508 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7adb5354
+2017-02-18 08:12:04,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:04,540 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:04,552 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:04,566 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54
+2017-02-18 08:12:04,576 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
+2017-02-18 08:12:04,581 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:04,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77
+2017-02-18 08:12:04,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 23 len: 27 to MEMORY
+2017-02-18 08:12:04,601 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:04,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100
+2017-02-18 08:12:04,605 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:04,608 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes
+2017-02-18 08:12:04,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:04,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 100 bytes from disk
+2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:04,613 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:04,620 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes
+2017-02-18 08:12:04,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
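Editor's note: each reduce attempt above repeats the same shuffle bookkeeping: LocalFetcher copies one segment per map, InMemoryMapOutput reports its decompressed size, and the closeInMemoryFile lines advance the counters additively, so usedMemory is always the previous commitMemory plus the new segment's size (for localfetcher#6 above: 54 + 23 = 77, then 77 + 23 = 100). Purely as an illustration, the sketch below scans a saved hadoop.log and checks that invariant; the class name and the log-file argument are assumptions, not part of the assignment.

```java
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/** Illustrative only: verifies the MergeManagerImpl accounting seen in the log,
 *  i.e. usedMemory == commitMemory + size on every closeInMemoryFile line. */
public class MergeAccountingCheck {
    // Matches e.g. "closeInMemoryFile -> map-output of size: 23, ... commitMemory -> 54, usedMemory ->77"
    private static final Pattern P = Pattern.compile(
        "closeInMemoryFile -> map-output of size: (\\d+),.*commitMemory -> (\\d+), usedMemory ->(\\d+)");

    public static void main(String[] args) throws Exception {
        try (BufferedReader in = new BufferedReader(new FileReader(args[0]))) {
            String line;
            while ((line = in.readLine()) != null) {
                Matcher m = P.matcher(line);
                if (!m.find()) continue;
                long size   = Long.parseLong(m.group(1));
                long commit = Long.parseLong(m.group(2));
                long used   = Long.parseLong(m.group(3));
                if (commit + size != used) {           // the invariant the log exhibits
                    System.out.println("mismatch: " + line);
                }
            }
        }
    }
}
```

Run as `java MergeAccountingCheck hadoop.log`; it prints nothing when the accounting is consistent, which it is for every reduce attempt in this log.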
+2017-02-18 08:12:04,663 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000006_0
+2017-02-18 08:12:04,684 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:04,685 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:04,686 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4ab6b2ab
+2017-02-18 08:12:04,692 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:04,706 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:04,719 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 08:12:04,724 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:04,746 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72
+2017-02-18 08:12:04,749 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
+2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:04,760 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100
+2017-02-18 08:12:04,762 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 28 len: 32 to MEMORY
+2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:04,779 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128
+2017-02-18 08:12:04,785 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:04,786 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,787 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:04,788 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes
+2017-02-18 08:12:04,794 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk
+2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:04,795 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:04,796 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes
+2017-02-18 08:12:04,808 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,868 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000007_0
+2017-02-18 08:12:04,873 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:04,874 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2b2083d9
+2017-02-18 08:12:04,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:04,894 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:04,916 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 08:12:04,921 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:04,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88
+2017-02-18 08:12:04,930 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
+2017-02-18 08:12:04,942 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:04,944 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->90
+2017-02-18 08:12:04,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 08:12:04,955 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:04,972 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 90, usedMemory ->101
+2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:04,973 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:04,974 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:04,977 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes
+2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:04,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk
+2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:04,979 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:04,986 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes
+2017-02-18 08:12:04,987 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:05,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000008_0
+2017-02-18 08:12:05,049 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:05,050 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@d62d2df
+2017-02-18 08:12:05,055 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:05,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:05,085 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 08:12:05,096 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:05,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98
+2017-02-18 08:12:05,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
+2017-02-18 08:12:05,105 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:05,109 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147
+2017-02-18 08:12:05,112 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 49 len: 53 to MEMORY
+2017-02-18 08:12:05,135 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:05,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196
+2017-02-18 08:12:05,139 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:05,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:05,147 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:05,158 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes
+2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk
+2017-02-18 08:12:05,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:05,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes
+2017-02-18 08:12:05,162 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:05,228 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2083140530_0001_r_000009_0
+2017-02-18 08:12:05,240 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:12:05,241 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4066a7f5
+2017-02-18 08:12:05,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:12:05,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2083140530_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:12:05,281 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000000_0 decomp: 22 len: 26 to MEMORY
+2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2083140530_0001_m_000000_0
+2017-02-18 08:12:05,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
+2017-02-18 08:12:05,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
+2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2083140530_0001_m_000002_0
+2017-02-18 08:12:05,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24
+2017-02-18 08:12:05,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2083140530_0001_m_000001_0 decomp: 13 len: 17 to MEMORY
+2017-02-18 08:12:05,339 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2083140530_0001_m_000001_0
+2017-02-18 08:12:05,342 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->37
+2017-02-18 08:12:05,346 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:05,347 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 08:12:05,349 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 08:12:05,350 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes
+2017-02-18 08:12:05,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
+2017-02-18 08:12:05,354 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 37 bytes from disk
+2017-02-18 08:12:05,357 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 08:12:05,359 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 08:12:05,360 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes
+2017-02-18 08:12:05,368 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 08:12:05,420 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 08:12:05,476 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2083140530_0001
+java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
+Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+	at org.apache.hadoop.io.compress.CompressionCodec$Util.createOutputStreamWithCodecPool(CompressionCodec.java:131)
+	at org.apache.hadoop.io.compress.SnappyCodec.createOutputStream(SnappyCodec.java:99)
+	at org.apache.hadoop.mapreduce.lib.output.TextOutputFormat.getRecordWriter(TextOutputFormat.java:136)
+	at org.apache.hadoop.mapred.ReduceTask$NewTrackingRecordWriter.<init>(ReduceTask.java:540)
+	at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:614)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 08:12:06,253 INFO org.apache.hadoop.mapreduce.Job: Job job_local2083140530_0001 failed with state FAILED due to: NA
+2017-02-18 08:12:06,525 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=63678066
+		FILE: Number of bytes written=838216
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=1471
+		Input split bytes=351
+		Combine input records=4678719
+		Combine output records=131
+		Reduce input groups=0
+		Reduce shuffle bytes=1471
+		Reduce input records=0
+		Reduce output records=0
+		Spilled Records=131
+		Shuffled Maps =30
+		Failed Shuffles=0
+		Merged Map outputs=30
+		GC time elapsed (ms)=951
+		Total committed heap usage (bytes)=576008192
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=0
+2017-02-18 08:51:02,416 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 08:51:02,998 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 08:51:03,001 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 08:51:04,900 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 08:51:04,911 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 08:51:07,102 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 08:51:07,231 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 08:51:07,733 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 08:51:09,552 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local986354165_0001
+2017-02-18 08:51:11,300 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 08:51:11,301 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local986354165_0001
+2017-02-18 08:51:11,322 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 08:51:11,377 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:51:11,379 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 08:51:11,841 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 08:51:11,843 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000000_0
+2017-02-18 08:51:12,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:51:12,283 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:51:12,304 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 08:51:12,322 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 running in uber mode : false
+2017-02-18 08:51:12,350 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:51:15,290 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:51:15,352 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:51:15,451 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:51:21,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:51:21,379 INFO org.apache.hadoop.mapreduce.Job: map 4% reduce 0%
+2017-02-18 08:51:24,298 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:51:24,389 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
+2017-02-18 08:51:27,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:51:27,399 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-18 08:51:30,311 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:51:30,406 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
+2017-02-18 08:51:30,786 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:51:30,789 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:51:30,790 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 08:51:30,791 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 08:51:33,314 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:33,416 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 08:51:36,318 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:39,322 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:42,328 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:45,334 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:48,337 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:50,854 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480)
+2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 08:51:50,855 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 08:51:51,341 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:54,345 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:51:57,347 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:00,348 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:03,356 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:06,360 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:09,366 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:09,648 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+	at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 08:52:09,699 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000001_0
+2017-02-18 08:52:09,706 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:52:09,707 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:52:09,710 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 08:52:11,414 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:52:11,482 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:52:11,494 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:52:11,551 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:52:12,367 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:15,633 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:15,716 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:52:16,551 INFO org.apache.hadoop.mapreduce.Job: map 35% reduce 0%
+2017-02-18 08:52:17,568 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 08:52:17,571 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 08:52:18,723 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:19,559 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0%
+2017-02-18 08:52:21,725 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:23,449 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480)
+2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:52:23,463 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 08:52:23,464 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 08:52:24,726 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:27,732 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:28,910 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@4cb8cd94
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+	at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 08:52:28,933 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local986354165_0001_m_000002_0
+2017-02-18 08:52:29,198 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:52:29,199 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:52:29,242 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 08:52:29,745 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:52:29,747 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:52:29,748 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:52:29,756 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:52:29,784 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:52:35,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:38,522 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:52:38,548 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 08:52:38,550 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 08:52:38,800 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:40,425 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:41,425 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 08:52:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:45,067 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:52:45,078 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 23042072(92168288)
+2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 08:52:45,079 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 08:52:46,435 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:48,811 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@601a1ecd
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+	at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 08:52:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 08:52:49,063 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local986354165_0001
+java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+	at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 08:52:49,437 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:52:49,458 INFO org.apache.hadoop.mapreduce.Job: Job job_local986354165_0001 failed with state FAILED due to: NA
+2017-02-18 08:52:49,693 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
+	File System Counters
+		FILE: Number of bytes read=70901752
+		FILE: Number of bytes written=829530
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=0
+		Input split bytes=351
+		Combine input records=0
+		Combine output records=0
+		Spilled Records=0
+		Failed Shuffles=0
+		Merged Map outputs=0
+		GC time elapsed (ms)=11806
+		Total committed heap usage (bytes)=1413275648
+	File Input Format Counters
+		Bytes Read=26057874
+2017-02-18 08:56:05,243 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 08:56:05,794 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 08:56:05,837 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 08:56:08,084 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 08:56:08,126 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 08:56:11,092 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
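Editor's note: both failed jobs above share one root cause. The first run dies in the reducer when TextOutputFormat asks SnappyCodec for a compressed output stream; the second dies earlier, in IFile's sortAndSpill, once map-output compression is switched on. In both stacks SnappyCodec.checkNativeCodeLoaded throws, because the "Unable to load native-hadoop library" warning at the top of each run means there is no native libhadoop, and Hadoop's Snappy codec has no pure-Java fallback. The deprecation notices for mapred.compress.map.output and mapred.map.output.compression.codec show the driver sets these keys itself, and the third run below gets past the spill by using gzip, which can fall back to the JDK's built-in zlib. A minimal sketch of that driver change follows, assuming a standard Job-based driver; the class name and job name are illustrative, and the property keys are exactly the ones the deprecation notices resolve to:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Sketch only: the two compression settings implicated in the failures above,
// redirected from Snappy to Gzip so no native libhadoop is required.
public class CompressionConfigSketch {
    static Job configure() throws Exception {
        Configuration conf = new Configuration();
        // Keys named by the deprecation notices in the log:
        conf.setBoolean("mapreduce.map.output.compress", true);
        conf.setClass("mapreduce.map.output.compress.codec",
                      GzipCodec.class, CompressionCodec.class); // falls back to JDK zlib
        Job job = Job.getInstance(conf, "assign1");
        job.setJarByClass(CompressionConfigSketch.class); // addresses the "No job jar file set" warning
        // The first failed run compressed the reducer's TextOutputFormat output with
        // Snappy; the equivalent Gzip settings would be:
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
        return job;
    }
}
```

The "No job jar file set" warning seen in every run is a separate issue and harmless under LocalJobRunner, where user classes are already on the local classpath; a real cluster submission would need the setJarByClass call shown above.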
+2017-02-18 08:56:11,184 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 08:56:11,584 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 08:56:13,206 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1323619014_0001
+2017-02-18 08:56:15,020 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 08:56:15,022 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1323619014_0001
+2017-02-18 08:56:15,035 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 08:56:15,105 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:56:15,119 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 08:56:15,525 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 08:56:15,526 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000000_0
+2017-02-18 08:56:15,791 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:56:15,934 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:56:15,969 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 08:56:16,025 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 running in uber mode : false
+2017-02-18 08:56:16,028 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 08:56:16,562 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:56:16,568 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:56:16,569 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:56:16,570 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:56:16,609 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:56:16,664 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:56:21,875 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:56:22,086 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-18 08:56:24,895 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:56:25,136 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
+2017-02-18 08:56:27,896 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:56:28,148 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-18 08:56:30,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:56:31,158 INFO org.apache.hadoop.mapreduce.Job: map 21% reduce 0%
+2017-02-18 08:56:31,565 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 08:56:31,574 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:56:31,575 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 08:56:31,576 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 08:56:33,907 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:34,166 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 08:56:36,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:39,916 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:42,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:45,925 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:48,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:50,241 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:50,247 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:51,938 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:52,095 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:52,099 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:52,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:52,933 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:53,960 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:53,965 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:54,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:54,329 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:54,752 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:54,785 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:54,941 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:56:55,192 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:55,203 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:55,743 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:55,745 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:56,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:56,137 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:56,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:56:56,912 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:56:57,187 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 08:56:57,253 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 08:56:57,256 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 08:56:57,261 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000000_0' done.
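Editor's note: in this third run the repeated "Got brand-new compressor [.gz]" / "Could not obtain compressor from CodecPool" pairs appear benign: with the pure-Java gzip path the pool evidently cannot hand IFile a reusable compressor, so each spill proceeds with a fresh gzip stream instead, and the task now completes ("Finished spill 0", "Task ... done."), which Snappy never reached. To confirm which native codecs an installation actually has, `hadoop checknative -a` lists them (zlib, snappy, lz4, and so on).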
+2017-02-18 08:56:57,262 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000000_0
+2017-02-18 08:56:57,263 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000001_0
+2017-02-18 08:56:57,270 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:56:57,271 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:56:57,273 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 08:56:57,664 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:56:57,669 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:56:57,670 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:56:57,671 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:56:57,678 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:56:57,702 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:56:58,254 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 08:57:01,849 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 08:57:01,863 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 08:57:01,864 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 08:57:02,264 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 08:57:03,300 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:57:04,281 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 08:57:06,304 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:57:07,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:07,567 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:07,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:07,728 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,017 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,021 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,230 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,232 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,382 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,388 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,528 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,548 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,700 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:08,852 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:08,893 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:09,003 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:09,020 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:09,263 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:09,270 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:09,309 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:57:09,394 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 08:57:09,411 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 08:57:09,416 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 08:57:09,419 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000001_0' done.
+2017-02-18 08:57:09,420 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000001_0
+2017-02-18 08:57:09,421 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_m_000002_0
+2017-02-18 08:57:09,430 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:09,431 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:09,432 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 08:57:09,807 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 08:57:09,823 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 08:57:09,827 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 08:57:09,836 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 08:57:09,850 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 08:57:10,305 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 08:57:12,866 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 08:57:12,871 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 08:57:13,315 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 08:57:15,444 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:57:16,322 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 08:57:17,038 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,042 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,180 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,341 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,558 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,563 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,684 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,847 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:17,931 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:17,969 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:18,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:18,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:18,225 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:18,239 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 08:57:18,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 08:57:18,472 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 08:57:18,520 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 08:57:18,583 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1323619014_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 08:57:18,590 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 08:57:18,594 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1323619014_0001_m_000002_0' done.
+2017-02-18 08:57:18,595 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1323619014_0001_m_000002_0
+2017-02-18 08:57:18,597 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 08:57:18,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 08:57:18,692 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000000_0
+2017-02-18 08:57:18,737 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:18,738 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:18,760 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a6d125b
+2017-02-18 08:57:18,891 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:18,919 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 08:57:19,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 08:57:19,290 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000001_0
+2017-02-18 08:57:19,296 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:19,297 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3115d34f
+2017-02-18 08:57:19,298 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 08:57:19,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:19,340 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 08:57:19,349 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 08:57:19,375 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000002_0
+2017-02-18 08:57:19,384 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:19,385 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4f3ed32a
+2017-02-18 08:57:19,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 08:57:19,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:19,434 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,463 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 08:57:19,468 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000003_0
+2017-02-18 08:57:19,494 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:19,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4edb440a
+2017-02-18 08:57:19,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:19,582 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,623 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 08:57:19,653 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000004_0
+2017-02-18 08:57:19,664 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:19,674 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@448d87fc
+2017-02-18 08:57:19,691 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:19,729 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 08:57:19,785 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000005_0
+2017-02-18 08:57:19,833 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 08:57:19,845 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 08:57:19,846 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@68f559e5
+2017-02-18 08:57:19,862 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 08:57:19,933 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 08:57:19,964 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 08:57:20,023 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 08:57:20,074 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000006_0
+2017-02-18 08:57:20,152
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,166 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,167 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,138 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,169 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,138 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,170 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@35fbcbad +2017-02-18 08:57:20,126 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,172 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,173 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,176 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,190 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,191 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,190 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,204 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,205 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 08:57:20,206 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,207 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,208 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,209 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,211 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,212 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,212 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,213 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,214 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,215 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,215 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,188 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,216 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,217 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,223 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,234 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,238 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,237 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,243 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,245 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,245 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,246 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,248 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,250 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,250 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,252 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,236 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,327 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,328 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of 
map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,330 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,331 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,335 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,301 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,300 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,338 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,339 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000007_0 +2017-02-18 08:57:20,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,342 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,343 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output 
of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,360 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,377 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4e2d66d5 +2017-02-18 08:57:20,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,380 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,384 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,385 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,387 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,388 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,391 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,389 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,411 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,411 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,412 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,414 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,409 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,401 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,400 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,417 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,418 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,399 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,398 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 08:57:20,468 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,470 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,462 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,487 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,489 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,490 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,490 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,494 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,494 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,495 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,495 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,497 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,497 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,498 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,499 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,500 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,501 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,501 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,460 
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,503 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,459 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,504 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,506 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,506 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,507 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,507 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,512 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,458 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,512 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,515 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,515 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,517 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,517 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,518 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map 
attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,477 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,471 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,554 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,556 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,556 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,557 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,559 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,560 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,563 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000008_0 +2017-02-18 08:57:20,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,565 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,567 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,542 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,581 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,534 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,519 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle 
output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,584 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,585 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@239de86 +2017-02-18 08:57:20,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,586 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,609 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,611 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,611 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,590 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,612 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,587 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,608 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,606 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 
08:57:20,633 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,641 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,642 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,644 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,669 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,668 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,654 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,697 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,699 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,701 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,649 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,704 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,705 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,705 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,707 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,707 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,708 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,647 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,645 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,711 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,712 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,714 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1323619014_0001_r_000009_0 +2017-02-18 08:57:20,718 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 08:57:20,724 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 08:57:20,725 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@236e4a57 +2017-02-18 08:57:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 08:57:20,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,746 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,746 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,748 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,779 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] 
+2017-02-18 08:57:20,776 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1323619014_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,813 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,815 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,767 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,766 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,817 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,765 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,764 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,818 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,763 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,762 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,786 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 22 len: 26 to MEMORY +2017-02-18 08:57:20,824 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,830 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,831 
INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 08:57:20,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,834 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,835 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,835 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,837 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 08:57:20,866 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,848 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete. +2017-02-18 08:57:20,847 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 08:57:20,845 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 08:57:20,843 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 08:57:20,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 98 len: 102 to MEMORY +2017-02-18 08:57:20,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 08:57:20,878 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 08:57:20,879 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1323619014_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 08:57:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map 
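The failure recorded next is the payload of this log: localfetcher#1 hands a fetched map segment to BuiltInGzipDecompressor, which rejects it with "not a gzip file". For context only (this helper is hypothetical, not part of the assignment code), a gzip stream is identified by its first two magic bytes, 0x1f and 0x8b, which is the check processBasicHeader performs on each shuffled segment:

import java.io.FileInputStream;
import java.io.IOException;

// Hypothetical standalone probe: reports whether a file begins with the
// gzip magic bytes (0x1f, 0x8b) that BuiltInGzipDecompressor.processBasicHeader
// expects before it will inflate a shuffled map segment.
public class GzipMagicCheck {
    public static boolean looksLikeGzip(String path) throws IOException {
        try (FileInputStream in = new FileInputStream(path)) {
            int b0 = in.read();
            int b1 = in.read();
            return b0 == 0x1f && b1 == 0x8b;
        }
    }

    public static void main(String[] args) throws IOException {
        // Pass the path of a spilled map output file to inspect.
        System.out.println(looksLikeGzip(args[0]) ? "gzip header present" : "not a gzip file");
    }
}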
+2017-02-18 08:57:20,893 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1323619014_0001
+java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
+Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
+	at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134)
+	at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+Caused by: java.io.IOException: not a gzip file
+	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496)
+	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257)
+	at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186)
+	at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91)
+	at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85)
+	at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199)
+	at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97)
+	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157)
+	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102)
+	at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85)
+2017-02-18 08:57:21,357 INFO org.apache.hadoop.mapreduce.Job: Job job_local1323619014_0001 failed with state FAILED due to: NA
+2017-02-18 08:57:21,653 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
+	File System Counters
+		FILE: Number of bytes read=63678066
+		FILE: Number of bytes written=838861
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=1471
+		Input split bytes=351
+		Combine input records=4678719
+		Combine output records=131
+		Spilled Records=131
+		Failed Shuffles=0
+		Merged Map outputs=0
+		GC time elapsed (ms)=740
+		Total committed heap usage (bytes)=576008192
+	File Input Format Counters
+		Bytes Read=26057874
+2017-02-18 09:08:49,683 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:08:50,130 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:08:50,137 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:08:51,720 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:08:51,730 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:08:53,605 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 09:08:53,644 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 09:08:54,021 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 09:08:55,329 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local996311227_0001
+2017-02-18 09:08:57,090 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 09:08:57,092 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local996311227_0001
+2017-02-18 09:08:57,103 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 09:08:57,143 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:08:57,159 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 09:08:57,575 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 09:08:57,576 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000000_0
+2017-02-18 09:08:57,771 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:08:57,879 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:08:57,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 09:08:58,340 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 running in uber mode : false
+2017-02-18 09:08:58,347 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:08:58,596 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:08:58,597 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:08:58,610 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:08:58,611 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:08:58,647 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:08:58,679 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:09:03,869 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:09:04,390 INFO org.apache.hadoop.mapreduce.Job: map 3% reduce 0%
+2017-02-18 09:09:06,885 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:09:07,419 INFO org.apache.hadoop.mapreduce.Job: map 9% reduce 0%
+2017-02-18 09:09:09,890 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:09:10,424 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-18 09:09:12,894 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
09:09:13,244 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:09:13,250 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:09:13,251 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:09:13,252 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:09:13,444 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 09:09:15,897 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:18,901 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:21,904 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:24,905 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:27,909 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:30,913 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:31,712 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:31,734 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:33,465 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:33,473 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:33,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:34,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:34,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,151 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:35,178 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,531 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:35,550 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:35,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,009 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:36,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,392 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:36,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:36,927 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:36,939 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:37,310 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:37,323 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:38,170 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:38,184 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:38,475 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:09:38,501 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000000_0 is done. 
And is in the process of committing +2017-02-18 09:09:38,509 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:09:38,519 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000000_0' done. +2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000000_0 +2017-02-18 09:09:38,521 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000001_0 +2017-02-18 09:09:38,528 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:09:38,529 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:09:38,530 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:09:38,533 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:09:38,948 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:09:38,955 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:09:38,956 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:09:38,957 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:09:38,963 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:09:38,973 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:09:43,485 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:09:43,506 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:09:43,507 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:09:43,562 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 09:09:44,574 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:45,580 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 09:09:47,578 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:09:49,441 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,455 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:49,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,639 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:49,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:49,941 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,166 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,188 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool +2017-02-18 09:09:50,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz] +2017-02-18 09:09:50,368 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool 
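The compressor churn above deserves a note. The 08:57 job earlier in this log died in the shuffle with `java.io.IOException: not a gzip file`, and this 09:08 rerun opens by translating the deprecated keys `mapred.compress.map.output` and `mapred.map.output.compression.codec`, so gzip-compressed map output is configured. The paired `Got brand-new compressor [.gz]` / `Could not obtain compressor from CodecPool` messages at every spill line up with the `NativeCodeLoader` warning at the top of the run: without the native library the gzip codec has no poolable pure-Java `Compressor`, and `IFile` then writes the spill segment uncompressed while the reduce side can still try to gunzip it, which is one plausible reading of the earlier "not a gzip file" failure. Below is a minimal driver sketch using the non-deprecated property names this log points to; the class name and the identity mapper/reducer setup are hypothetical, not the assignment's actual code:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.NativeCodeLoader;

public class CompressedShuffleSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Current spellings of the two keys flagged by the 09:08:50 deprecation notices.
        conf.setBoolean("mapreduce.map.output.compress", true);
        conf.setClass("mapreduce.map.output.compress.codec",
                GzipCodec.class, CompressionCodec.class);
        // Gzip only has a poolable compressor when native-hadoop is loaded;
        // false here predicts exactly the CodecPool warnings in this log.
        System.err.println("native-hadoop loaded: " + NativeCodeLoader.isNativeCodeLoaded());

        Job job = Job.getInstance(conf, "compressed-shuffle-sketch");
        job.setJarByClass(CompressedShuffleSketch.class);
        // Default (identity) mapper and reducer are enough to exercise the compressed shuffle.
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```

Installing the native libraries, or picking a codec whose compressor has a pure-Java implementation (e.g. `org.apache.hadoop.io.compress.DefaultCodec`), should make the `CodecPool` warnings disappear.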
+2017-02-18 09:09:50,504 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:50,519 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:50,582 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:09:50,676 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:50,676 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:50,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:50,936 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:51,060 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:51,065 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:51,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:51,334 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:51,476 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:09:51,490 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 09:09:51,503 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:09:51,515 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000001_0' done.
+2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000001_0
+2017-02-18 09:09:51,516 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_m_000002_0
+2017-02-18 09:09:51,529 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:09:51,530 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:09:51,543 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 09:09:51,816 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:09:51,966 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:09:51,975 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:09:51,976 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:09:51,977 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:09:51,982 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:09:51,984 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:09:54,908 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:09:54,916 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:09:54,917 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:09:54,918 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:09:55,827 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 09:09:57,551 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:09:57,840 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 09:09:59,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,121 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,253 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,416 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,606 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,714 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,730 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,861 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,865 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:09:59,988 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:09:59,989 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:10:00,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:10:00,146 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:10:00,228 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:10:00,241 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:10:00,468 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:10:00,470 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:10:00,518 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:10:00,555 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:10:00,584 INFO org.apache.hadoop.mapred.Task: Task:attempt_local996311227_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:10:00,586 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local996311227_0001_m_000002_0' done.
+2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local996311227_0001_m_000002_0
+2017-02-18 09:10:00,587 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
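Below, the local runner moves into the reduce phase and starts reduce attempts r_000000 onwards (seven, r_000000 through r_000006, are visible in this stretch of the log), each with `LocalFetcher` threads that pull that reducer's partition of every map output into memory; in the `decomp: 31 len: 35` lines, `len` is the gzipped segment size and `decomp` the payload. Which reducer owns which keys was fixed at spill time by the job's partitioner; unless the driver overrides it, that is Hadoop's default `HashPartitioner`. A sketch of the equivalent logic, assuming `Text`/`IntWritable` map output types (the job's real types are not visible in the log, and the reducer count is taken from the attempt ids below):

```java
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;

public class PartitionSketch {
    // Same arithmetic as the default org.apache.hadoop.mapreduce.lib.partition.HashPartitioner:
    // mask the sign bit, then bucket the key hash modulo the reducer count.
    public static class LikeHashPartitioner extends Partitioner<Text, IntWritable> {
        @Override
        public int getPartition(Text key, IntWritable value, int numReduceTasks) {
            return (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks;
        }
    }

    public static void configure(Job job) {
        job.setNumReduceTasks(7);                           // r_000000..r_000006 below
        job.setPartitionerClass(LikeHashPartitioner.class); // default behaviour, made explicit
    }
}
```

Because every map task ran the same partitioner, each reducer's fetcher pulls its own fixed partition of `m_000002`'s output, which is why the per-fetcher sizes below differ but stay constant (decomp 11 up to 51 bytes in these lines).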
+2017-02-18 09:10:00,680 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 09:10:00,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000000_0 +2017-02-18 09:10:00,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:00,728 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:00,749 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@10c8fecc +2017-02-18 09:10:00,843 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:10:00,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:00,891 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,153 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,253 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000001_0 +2017-02-18 09:10:01,258 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,259 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,260 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@553beecf +2017-02-18 09:10:01,262 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,262 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,264 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,274 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,284 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,286 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,295 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,296 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,304 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,306 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,290 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,327 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000002_0 +2017-02-18 09:10:01,336 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,337 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@605afad3 +2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,344 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,345 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,349 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,355 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,366 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,368 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,381 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,383 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,378 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,376 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,404 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,405 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,406 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,385 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,407 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,408 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000003_0 +2017-02-18 09:10:01,421 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,423 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,423 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,424 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,424 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,426 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,426 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,427 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,427 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,428 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,428 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,430 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 
09:10:01,431 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,433 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,434 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,434 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,437 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,438 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,440 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,446 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@673f2af1 +2017-02-18 09:10:01,455 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,456 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,457 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,458 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,464 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,466 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,474 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,509 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,508 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,481 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,513 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,516 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,517 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000004_0 +2017-02-18 09:10:01,521 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,514 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,543 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,544 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,536 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,558 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: 
org.apache.hadoop.mapreduce.task.reduce.Shuffle@3a668605 +2017-02-18 09:10:01,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,549 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,543 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,562 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,562 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,564 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,564 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,565 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,575 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,568 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,582 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,582 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,584 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,584 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,585 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,585 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,586 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,586 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,566 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,588 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,589 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,589 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,591 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,592 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,593 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,594 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,615 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,617 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,595 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,629 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,629 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,630 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,631 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,627 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,637 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,638 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 
len: 15 to MEMORY +2017-02-18 09:10:01,640 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,659 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,653 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,663 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,650 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,679 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,648 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,681 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,642 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,682 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,683 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,683 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,690 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,697 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,696 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000005_0 +2017-02-18 09:10:01,696 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,710 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,710 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,729 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,729 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,730 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,730 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,693 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,731 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,733 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,733 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,734 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,734 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,735 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,735 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,737 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,738 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,739 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,739 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,727 INFO 
org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,727 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:10:01,744 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,745 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,791 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,796 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,797 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,795 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,798 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,799 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,794 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,799 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,800 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,801 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,793 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:10:01,802 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6349766d +2017-02-18 09:10:01,792 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,803 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,804 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,805 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,806 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,812 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:10:01,819 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map 
attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,820 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,808 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,833 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,842 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,807 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,843 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,844 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,844 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,846 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,846 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:01,852 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:01,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,841 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,868 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,869 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:01,870 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,867 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:10:01,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:01,882 INFO org.apache.hadoop.io.compress.CodecPool: 
Got brand-new decompressor [.gz]
+2017-02-18 09:10:01,854 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
+2017-02-18 09:10:01,870 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY
+2017-02-18 09:10:01,878 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:10:01,882 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY
+2017-02-18 09:10:01,907 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY
+2017-02-18 09:10:01,911 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000006_0
+2017-02-18 09:10:01,912 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY
+2017-02-18 09:10:01,913 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY
+2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:10:01,956 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:10:01,957 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4260af0b
+2017-02-18 09:10:01,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:10:02,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:10:02,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 28 len: 32 to MEMORY
+2017-02-18 09:10:02,049 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000007_0
+2017-02-18 09:10:02,156 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:10:02,181 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:10:02,182 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@74eef9db
+2017-02-18 09:10:02,192 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:10:02,226 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:10:02,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
+2017-02-18 09:10:02,276 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000008_0
+2017-02-18 09:10:02,294 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:10:02,304 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:10:02,305 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@eb9012c
+2017-02-18 09:10:02,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:10:02,414 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:10:02,541 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY
+2017-02-18 09:10:02,548 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local996311227_0001_r_000009_0
+2017-02-18 09:10:02,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:10:02,643 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@45b028db
+2017-02-18 09:10:02,657 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:10:02,841 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local996311227_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:10:02,856 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY
+2017-02-18 09:10:02,857 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
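Each fetch line above is paired with a CodecPool "Got brand-new decompressor [.gz]" line: CodecPool logs that message at INFO level whenever it has to construct a decompressor because none is idle in its pool, which is why a local run spinning up many fetcher threads prints it so often. A minimal, hedged sketch of that borrow/return pattern (the demo class is hypothetical; the CodecPool API is real):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

// Hypothetical demo (not from the assignment sources): borrow a decompressor
// the way the shuffle's LocalFetcher does.
public final class DecompressorDemo {
    public static void main(String[] args) {
        CompressionCodec codec =
            ReflectionUtils.newInstance(GzipCodec.class, new Configuration());
        // Logs "Got brand-new decompressor [.gz]" when the pool is empty.
        Decompressor d = CodecPool.getDecompressor(codec);
        try {
            // ... wrap an input stream with codec.createInputStream(in, d) and read ...
        } finally {
            CodecPool.returnDecompressor(d); // return it so later fetches can reuse it
        }
    }
}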
+2017-02-18 09:10:02,849 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,871 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 49 len: 53 to MEMORY +2017-02-18 09:10:02,855 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 11 len: 15 to MEMORY +2017-02-18 09:10:02,854 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,872 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 39 len: 43 to MEMORY +2017-02-18 09:10:02,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 51 len: 55 to MEMORY +2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,873 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 23 len: 27 to MEMORY +2017-02-18 09:10:02,851 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 2 len: 6 to MEMORY +2017-02-18 09:10:02,850 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,874 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,862 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:10:02,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local996311227_0001_m_000002_0 decomp: 31 len: 35 to MEMORY +2017-02-18 09:10:02,876 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local996311227_0001 +java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489) + at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556) +Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1 + at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134) + at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376) + at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346) + at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) + at java.util.concurrent.FutureTask.run(FutureTask.java:262) + at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) + at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) + at java.lang.Thread.run(Thread.java:745) +Caused by: java.io.IOException: not a gzip file + at 
org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257) + at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186) + at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91) + at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85) + at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199) + at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102) + at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85) +2017-02-18 09:10:03,867 INFO org.apache.hadoop.mapreduce.Job: Job job_local996311227_0001 failed with state FAILED due to: NA +2017-02-18 09:10:04,136 INFO org.apache.hadoop.mapreduce.Job: Counters: 18 + File System Counters + FILE: Number of bytes read=63678066 + FILE: Number of bytes written=834451 + FILE: Number of read operations=0 + FILE: Number of large read operations=0 + FILE: Number of write operations=0 + Map-Reduce Framework + Map input records=507535 + Map output records=4678719 + Map output bytes=43638689 + Map output materialized bytes=1471 + Input split bytes=351 + Combine input records=4678719 + Combine output records=131 + Spilled Records=131 + Failed Shuffles=0 + Merged Map outputs=0 + GC time elapsed (ms)=847 + Total committed heap usage (bytes)=576008192 + File Input Format Counters + Bytes Read=26057874 +2017-02-18 09:26:49,408 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable +2017-02-18 09:26:49,749 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress +2017-02-18 09:26:49,775 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec +2017-02-18 09:26:51,050 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id +2017-02-18 09:26:51,065 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId= +2017-02-18 09:26:52,559 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String). 
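Note on the failure above: every reduce-side LocalFetcher died trying to gunzip intermediate map output ("not a gzip file"), and the retry that follows immediately logs deprecation warnings for mapred.compress.map.output and mapred.map.output.compression.codec. Together these suggest the driver turned on gzip compression of map output using the old property names and then hit this shuffle error under LocalJobRunner. A minimal sketch of the implied configuration, assuming it was done in the job driver: only the two property names and GzipCodec come from the log; the class name and the rest of the setup are hypothetical.

    // Hypothetical driver excerpt: enable gzip map-output compression with the
    // old property names, matching the deprecation warnings logged below.
    // Everything except the two property names and GzipCodec is assumed.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.mapreduce.Job;

    public class CompressedMapOutputDriver {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Old names; at submission Hadoop rewrites them to
            // mapreduce.map.output.compress / mapreduce.map.output.compress.codec.
            conf.setBoolean("mapred.compress.map.output", true);
            conf.setClass("mapred.map.output.compression.codec",
                    GzipCodec.class, CompressionCodec.class);
            Job job = Job.getInstance(conf, "assign1 question");
            // ... mapper/combiner/reducer and input/output paths as in the stub ...
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }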
+2017-02-18 09:26:49,408 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:26:49,749 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:26:49,775 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:26:51,050 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:26:51,065 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:26:52,559 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 09:26:52,648 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 09:26:52,917 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 09:26:53,914 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local751599384_0001
+2017-02-18 09:26:55,334 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 09:26:55,335 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local751599384_0001
+2017-02-18 09:26:55,353 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 09:26:55,413 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:26:55,421 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 09:26:55,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 09:26:55,832 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000000_0
+2017-02-18 09:26:56,054 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:26:56,169 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:26:56,178 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 09:26:56,622 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 running in uber mode : false
+2017-02-18 09:26:56,624 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:26:56,926 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:26:56,956 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:26:57,006 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:27:02,132 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:27:02,662 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
+2017-02-18 09:27:05,153 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:27:05,670 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
+2017-02-18 09:27:08,158 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:27:08,237 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:27:08,244 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:27:08,245 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 09:27:08,246 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 09:27:08,689 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 09:27:11,159 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:14,169 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:17,178 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:20,180 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:22,469 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:22,475 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:23,187 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:24,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:24,134 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:24,753 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:24,755 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:25,552 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:25,553 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:25,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:25,892 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:26,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:26,249 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:26,269 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:26,579 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:26,583 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:26,992 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:27,005 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:27,304 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:27,308 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:27,911 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:27,916 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:28,128 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:27:28,155 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 09:27:28,161 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:27:28,164 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000000_0' done.
+2017-02-18 09:27:28,166 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000000_0
+2017-02-18 09:27:28,167 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000001_0
+2017-02-18 09:27:28,174 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:28,175 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:28,177 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 09:27:28,463 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:27:28,474 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:27:28,475 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:27:28,476 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:27:28,482 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:27:28,494 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:27:28,789 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:27:31,526 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:27:31,534 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:27:31,535 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 09:27:31,536 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 09:27:31,805 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 09:27:34,203 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:34,817 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 09:27:35,818 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:35,836 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:35,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:35,952 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,167 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,176 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,302 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,330 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,439 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,444 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,566 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,573 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,670 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,693 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,829 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,837 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:36,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:36,932 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:37,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:37,127 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:37,198 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:27:37,205 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:37,217 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 09:27:37,222 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:27:37,224 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000001_0' done.
+2017-02-18 09:27:37,225 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000001_0
+2017-02-18 09:27:37,226 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_m_000002_0
+2017-02-18 09:27:37,233 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:37,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:37,250 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 09:27:37,543 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:27:37,547 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:27:37,548 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:27:37,554 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:27:37,556 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:27:37,828 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:27:39,830 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:27:39,852 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:27:39,853 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:27:39,854 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:27:40,851 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 09:27:42,899 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:42,902 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,018 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,164 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,164 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,270 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:27:43,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,372 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,467 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,482 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,607 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,635 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,736 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,746 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,863 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 09:27:43,872 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:43,896 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:43,981 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:44,003 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:44,206 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.gz]
+2017-02-18 09:27:44,209 WARN org.apache.hadoop.mapred.IFile: Could not obtain compressor from CodecPool
+2017-02-18 09:27:44,256 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:27:44,279 INFO org.apache.hadoop.mapred.Task: Task:attempt_local751599384_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 09:27:44,285 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:27:44,288 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local751599384_0001_m_000002_0' done.
+2017-02-18 09:27:44,289 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local751599384_0001_m_000002_0
+2017-02-18 09:27:44,290 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
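Note on the map phase above: every spill logs the pair "Got brand-new compressor [.gz]" followed by "Could not obtain compressor from CodecPool". Given the NativeCodeLoader warning at startup, a plausible reading is that without native zlib GzipCodec cannot hand the pool a reusable Compressor, so each IFile writer's checkout comes back null and it falls back; a mismatch between what the map side then actually writes and the gzip framing the reduce side expects would also be consistent with the earlier "not a gzip file" stack trace. The checkout pattern is sketched below: CodecPool.getCompressor, CodecPool.returnCompressor, and the two CompressionCodec.createOutputStream overloads are real Hadoop APIs, but the null-fallback policy is an assumption, not a copy of IFile's internals.

    // Sketch of the CodecPool checkout behind the paired log lines above.
    // Real APIs: CodecPool.getCompressor/returnCompressor and
    // CompressionCodec.createOutputStream; the fallback branch is assumed.
    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.Compressor;

    public final class PooledCompressedStream {
        private PooledCompressedStream() {}

        // Wraps 'raw' in a compressed stream, preferring a pooled Compressor.
        // The caller should eventually pass the Compressor it obtained back to
        // CodecPool.returnCompressor(...) once the stream is closed.
        public static OutputStream open(CompressionCodec codec, OutputStream raw)
                throws IOException {
            // CodecPool logs "Got brand-new compressor [.gz]" on a pool miss,
            // and can still return null when the codec cannot build a
            // compressor at all (e.g. GzipCodec without native zlib loaded).
            Compressor compressor = CodecPool.getCompressor(codec);
            if (compressor == null) {
                // This is the situation in which IFile warns "Could not obtain
                // compressor from CodecPool"; using the codec's own stream here
                // is our simplification of what happens next.
                return codec.createOutputStream(raw);
            }
            return codec.createOutputStream(raw, compressor);
        }
    }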
+2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 09:27:44,365 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000000_0
+2017-02-18 09:27:44,412 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:44,414 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:44,442 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53133050
+2017-02-18 09:27:44,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:44,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:44,731 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,763 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,802 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000001_0
+2017-02-18 09:27:44,808 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:44,809 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2ee007c9
+2017-02-18 09:27:44,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:44,814 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,815 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,816 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,816 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,817 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,820 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,827 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,841 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,837 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:44,853 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,860 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,866 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:27:44,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,872 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000002_0
+2017-02-18 09:27:44,874 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,877 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,887 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,887 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,888 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,890 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,891 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,884 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:44,893 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2912ee3c
+2017-02-18 09:27:44,896 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,897 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:44,904 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,905 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,911 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,919 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,929 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:44,924 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,939 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,940 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,940 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,941 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,944 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,933 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:44,961 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000003_0
+2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:44,969 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@584ca76
+2017-02-18 09:27:44,971 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,972 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:44,973 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,977 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,995 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,995 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,996 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,996 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,997 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,998 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,999 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:44,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,000 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,000 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,001 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,002 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:44,977 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:44,977 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,004 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,005 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,005 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,006 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,007 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,008 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,009 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,009 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,010 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,011 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,012 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,056 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,055 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,033 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:45,013 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,058 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,059 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,060 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000004_0
+2017-02-18 09:27:45,061 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,070 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,068 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7490b4d0
+2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,074 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,070 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,077 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,080 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,080 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,081 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,078 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,086 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,089 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,089 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,090 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,100 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,101 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,091 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:45,108 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,094 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,093 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,109 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,110 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,111 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,111 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,112 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,114 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,114 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,131 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,134 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,124 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,140 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,140 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,119 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,141 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,145 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,135 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,147 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,148 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,158 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,157 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,174 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,176 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,156 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,154 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:45,150 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,178 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,179 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,180 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,182 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,174 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:45,202 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:45,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000005_0
+2017-02-18 09:27:45,196 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,191 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,190 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,203 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,208 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,211 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,222 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,224 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,224 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,225 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,225 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,226 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,226 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,228 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,228 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,229 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,229 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,210 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,209 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,231 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,232 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,233 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,234 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,234 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,235 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,236 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,243 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,251 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,251 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:45,252 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:45,253 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3e35379c
+2017-02-18 09:27:45,255 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,255 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,257 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,237 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:45,265 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:45,278 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,280 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,280 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,281 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,281 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,282 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,282 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,283 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,284 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,285 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,285 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,286 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,286 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,287 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,289 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,289 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,290 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,305 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:45,300 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,328 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,329 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,330 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,330 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,331 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,331 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:45,299 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,332 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,334 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,334 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,335 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,335 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,337 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,337 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:45,291 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,338 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,340 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,341 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,341 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,291 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,342 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:45,343 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:45,346 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,353 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:45,358 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:45,356 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000006_0
+2017-02-18 09:27:45,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,373 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:45,355 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,385 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:45,386 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,386 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:45,388 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:45,384 INFO
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,389 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,391 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,384 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,394 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,395 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,395 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,396 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,396 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,397 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,398 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,383 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,400 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,401 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,401 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,383 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,369 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,377 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,403 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,403 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,410 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,417 
INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,425 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,422 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,430 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,432 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,432 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,421 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,432 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@43e0ff38 +2017-02-18 09:27:45,420 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,433 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,435 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,436 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,436 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,418 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,438 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,429 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,439 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,440 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,460 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,465 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, 
ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,464 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,464 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,472 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,474 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,474 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,475 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,475 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,477 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,477 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,478 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,478 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,479 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,480 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,481 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,481 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,463 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,484 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,462 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,485 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,461 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,485 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,486 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 
len: 158 to MEMORY +2017-02-18 09:27:45,507 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,537 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,540 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,540 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,546 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,548 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,550 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,550 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,539 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,552 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,553 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,571 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,571 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,574 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,591 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,593 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,584 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,604 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,578 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,605 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,607 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,607 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to 
shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,608 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,608 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,610 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,610 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,611 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,611 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,577 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,612 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,613 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,613 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,576 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,614 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,616 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,617 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,617 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,619 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,619 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,620 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,620 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,622 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,575 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,673 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,575 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,675 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,676 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,676 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,677 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,679 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,679 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,680 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,680 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,682 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000007_0 +2017-02-18 09:27:45,684 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,686 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,666 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,637 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,634 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,692 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,693 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,693 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,695 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,695 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,696 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,696 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,632 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,698 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,700 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,700 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,701 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,701 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,690 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,703 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,703 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@77e7c326 +2017-02-18 09:27:45,686 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,705 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,706 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,707 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,706 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,709 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,710 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,712 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,713 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,720 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,721 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,720 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,738 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,740 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,741 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,719 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,717 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:45,713 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,743 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,743 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,744 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,744 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,755 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,775 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,777 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,762 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,785 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,786 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,787 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,787 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,788 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,789 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,790 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,790 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,791 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,791 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,761 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,792 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,793 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,760 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,758 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,757 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,795 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,797 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,784 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,797 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,798 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,875 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,783 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:27:45,880 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,861 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,888 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,888 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,890 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,917 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,918 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000008_0 +2017-02-18 09:27:45,920 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,886 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,920 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,884 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,922 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,923 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,923 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,925 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,925 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,883 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,926 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,928 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,928 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,929 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,929 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,931 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,931 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,932 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,904 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,891 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,935 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,937 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,937 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,939 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,939 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,941 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,941 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,942 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,946 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,935 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,934 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,947 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,952 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,958 INFO 
org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,958 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,960 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,957 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,960 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,961 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,962 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:45,964 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,956 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,972 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,955 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:27:45,975 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:27:45,975 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7659657a +2017-02-18 09:27:45,955 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,976 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,977 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,977 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,979 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,979 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,980 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,980 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY +2017-02-18 09:27:45,954 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,982 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,983 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,983 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,985 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:45,952 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,986 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,987 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,988 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,989 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,989 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,991 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,991 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY +2017-02-18 09:27:45,968 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,992 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY +2017-02-18 09:27:45,963 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY +2017-02-18 09:27:45,963 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY +2017-02-18 09:27:45,993 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:45,993 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY +2017-02-18 09:27:45,993 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY +2017-02-18 09:27:46,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:27:46,023 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,030 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY +2017-02-18 09:27:46,030 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz] +2017-02-18 09:27:46,032 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map 
attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,034 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,029 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,034 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,036 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,028 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,036 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,037 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,038 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,039 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,039 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,040 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,041 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,026 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,042 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,043 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,045 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,045 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,025 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,046 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,048 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,048 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,049 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,049 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,051 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,051 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,052 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,053 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,054 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,058 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,059 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,060 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,075 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,076 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,105 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,105 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,115 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,104 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,116 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,117 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,103 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,118 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,119 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,120 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,121 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,121 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,102 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,122 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,124 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,124 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,125 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,125 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,127 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,127 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,128 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,101 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,100 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,099 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,080 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:46,107 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,175 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,179 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,180 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,181 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,181 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,184 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,184 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,186 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,186 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,187 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,187 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,189 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,189 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,194 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,194 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,195 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,196 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,197 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,199 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,200 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,200 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,202 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,176 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,254 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,256 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,256 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,257 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,258 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,258 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,260 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,260 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,261 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,175 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,235 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,223 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,263 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,264 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,203 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,265 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,202 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local751599384_0001_r_000009_0
+2017-02-18 09:27:46,267 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,286 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:27:46,287 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@18084038
+2017-02-18 09:27:46,291 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:27:46,305 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local751599384_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:27:46,320 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,320 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,322 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,323 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,324 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,324 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,325 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,325 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,326 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,326 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,327 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,328 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,333 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,333 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,345 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,350 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,352 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,353 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,353 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,354 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,354 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,356 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,356 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,357 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,357 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,358 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,360 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,360 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,361 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,349 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,362 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,363 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,363 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,364 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,365 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,366 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,366 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,367 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,367 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,368 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,368 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,370 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,348 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,371 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,371 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,346 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,373 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,413 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,351 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,414 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,415 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,415 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,416 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,416 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,419 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,419 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,420 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,420 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,421 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,421 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,509 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,509 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,510 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,510 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,511 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,518 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 52 len: 56 to MEMORY
+2017-02-18 09:27:46,522 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,523 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,524 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,524 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,525 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,525 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 154 len: 158 to MEMORY
+2017-02-18 09:27:46,520 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,526 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 22 len: 26 to MEMORY
+2017-02-18 09:27:46,527 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 09:27:46,530 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local751599384_0001
+java.lang.Exception: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
+ at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:556)
+Caused by: org.apache.hadoop.mapreduce.task.reduce.Shuffle$ShuffleError: error in shuffle in localfetcher#1
+ at org.apache.hadoop.mapreduce.task.reduce.Shuffle.run(Shuffle.java:134)
+ at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:376)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:346)
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+Caused by: java.io.IOException: not a gzip file
+ at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.processBasicHeader(BuiltInGzipDecompressor.java:496)
+ at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.executeHeaderState(BuiltInGzipDecompressor.java:257)
+ at org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.decompress(BuiltInGzipDecompressor.java:186)
+ at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:91)
+ at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:85)
+ at org.apache.hadoop.io.IOUtils.readFully(IOUtils.java:199)
+ at org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput.shuffle(InMemoryMapOutput.java:97)
+ at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.copyMapOutput(LocalFetcher.java:157)
+ at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.doCopy(LocalFetcher.java:102)
+ at org.apache.hadoop.mapreduce.task.reduce.LocalFetcher.run(LocalFetcher.java:85)
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,557 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 98 len: 102 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 112 len: 116 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,558 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 63 len: 67 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 61 len: 65 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 54 len: 58 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 88 len: 92 to MEMORY
+2017-02-18 09:27:46,538 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.gz]
+2017-02-18 09:27:46,560 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local751599384_0001_m_000000_0 decomp: 72 len: 76 to MEMORY
+2017-02-18 09:27:46,912 INFO org.apache.hadoop.mapreduce.Job: Job job_local751599384_0001 failed with state FAILED due to: NA
+2017-02-18 09:27:47,155 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
+ File System Counters
+  FILE: Number of bytes read=63678066
+  FILE: Number of bytes written=834451
+  FILE: Number of read operations=0
+  FILE: Number of large read operations=0
+  FILE: Number of write operations=0
+ Map-Reduce Framework
+  Map input records=507535
+  Map output records=4678719
+  Map output bytes=43638689
+  Map output materialized bytes=1471
+  Input split bytes=351
+  Combine input records=4678719
+  Combine output records=131
+  Spilled Records=131
+  Failed Shuffles=0
+  Merged Map outputs=0
+  GC time elapsed (ms)=663
+  Total committed heap usage (bytes)=576008192
+ File Input Format Counters
+  Bytes Read=26057874
+2017-02-18 09:32:52,271 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:32:52,676 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:32:52,678 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:32:53,961 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:32:53,971 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:32:55,424 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
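The failed run above is what a map-output compression mismatch looks like in local mode: every LocalFetcher repeatedly pulls the same map output, tries to open it with the built-in gzip decompressor, and dies with "java.io.IOException: not a gzip file", so job_local751599384_0001 fails with no reduce output. The Configuration.deprecation notices at the next startup show the compression settings were being passed through the old mapred.* keys. A minimal driver sketch, assuming a hypothetical WordCount-style job (the CompressionConfig class and job name are illustrative; the property names are the non-deprecated keys printed by the log), is:

    // Hypothetical driver fragment: enable map-output compression with one
    // codec so the map side (write) and the shuffle side (read) agree.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.mapreduce.Job;

    public class CompressionConfig {
        public static Job newJob() throws Exception {
            Configuration conf = new Configuration();
            // Non-deprecated keys, per the deprecation notices above.
            conf.setBoolean("mapreduce.map.output.compress", true);
            conf.setClass("mapreduce.map.output.compress.codec",
                          GzipCodec.class, CompressionCodec.class);
            return Job.getInstance(conf, "assign1-question");
        }
    }

Setting both properties on the same Configuration before Job.getInstance ensures the spill writer and the in-memory shuffle use the same codec; toggling only one of them between runs is one way to reproduce the error above.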
+2017-02-18 09:32:55,476 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 09:32:55,773 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 09:32:56,770 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local963140535_0001
+2017-02-18 09:32:58,243 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 09:32:58,245 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local963140535_0001
+2017-02-18 09:32:58,257 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 09:32:58,292 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:32:58,293 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 09:32:58,640 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 09:32:58,641 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000000_0
+2017-02-18 09:32:58,823 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:32:58,889 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:32:58,893 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 09:32:59,248 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 running in uber mode : false
+2017-02-18 09:32:59,269 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 09:32:59,522 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:32:59,538 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:32:59,539 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:32:59,569 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:32:59,582 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:33:04,889 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:33:05,319 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0%
+2017-02-18 09:33:07,900 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:33:08,340 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0%
+2017-02-18 09:33:10,903 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:33:10,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:33:10,937 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:10,938 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 09:33:10,939 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 09:33:11,347 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 09:33:13,912 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:16,917 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:19,923 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:22,930 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:25,935 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:27,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 14765620(59062480)
+2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 09:33:27,129 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 09:33:28,944 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:31,946 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:34,957 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:37,968 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:40,042 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@f6a4c4a
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+ at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+ at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+ at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+ at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+ at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+2017-02-18 09:33:40,081 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000001_0
+2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:33:40,091 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:33:40,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 09:33:40,579 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:33:40,589 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:33:40,592 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:33:40,594 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:33:40,595 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:33:40,602 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:33:40,611 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:33:40,971 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:43,769 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 09:33:43,774 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 09:33:43,979 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:46,116 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:46,528 INFO org.apache.hadoop.mapreduce.Job: map 44% reduce 0%
+2017-02-18 09:33:47,986 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:47,992 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 22120620(88482480)
+2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 09:33:47,993 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 09:33:49,120 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:51,563 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@e5c48f1
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+ at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+ at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+ at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+ at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+ at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+2017-02-18 09:33:51,577 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local963140535_0001_m_000002_0
+2017-02-18 09:33:51,588 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:33:51,595 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:33:51,599 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 09:33:51,954 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:33:51,962 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:33:51,963 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:33:51,973 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:33:51,974 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:33:52,131 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:54,795 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:33:54,806 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:33:55,134 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:57,609 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:33:57,930 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 09:33:57,940 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:33:57,945 INFO org.apache.hadoop.mapred.MapTask: (RESET) equator 0 kv 26214396(104857584) kvi 23042072(92168288)
+2017-02-18 09:33:57,946 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:33:57,947 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:34:00,613 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:34:00,657 INFO org.apache.hadoop.mapred.MapTask: Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@2d14b355
+java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+ at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+ at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+ at org.apache.hadoop.mapred.MapTask.closeQuietly(MapTask.java:2016)
+ at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:797)
+ at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+2017-02-18 09:34:00,664 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 09:34:00,673 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local963140535_0001
+java.lang.Exception: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+ at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
+ at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
+ at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
+ at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:114)
+ at org.apache.hadoop.mapred.IFile$Writer.<init>(IFile.java:97)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1606)
+ at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1486)
+ at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
+ at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
+ at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+ at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+ at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+ at java.lang.Thread.run(Thread.java:745)
+2017-02-18 09:34:00,943 INFO org.apache.hadoop.mapreduce.Job: Job job_local963140535_0001 failed with state FAILED due to: NA
+2017-02-18 09:34:01,055 INFO org.apache.hadoop.mapreduce.Job: Counters: 18
+ File System Counters
+  FILE: Number of bytes read=73722745
+  FILE: Number of bytes written=829530
+  FILE: Number of read operations=0
+  FILE: Number of large read operations=0
+  FILE: Number of write operations=0
+ Map-Reduce Framework
+  Map input records=507535
+  Map output records=4678719
+  Map output bytes=43638689
+  Map output materialized bytes=0
+  Input split bytes=351
+  Combine input records=0
+  Combine output records=0
+  Spilled Records=0
+  Failed Shuffles=0
+  Merged Map outputs=0
+  GC time elapsed (ms)=1944
+  Total committed heap usage (bytes)=1413292032
+ File Input Format Counters
+  Bytes Read=26057874
+2017-02-18 09:36:42,891 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:36:43,290 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:36:43,295 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:36:44,625 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:36:44,637 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:36:46,271 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
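This second failure is independent of the input data: SnappyCodec.checkNativeCodeLoaded throws because this libhadoop build has no native snappy support (note the recurring NativeCodeLoader warning at every startup), so the very first sortAndSpill dies in all three map tasks. A preflight check along the lines sketched below, assuming the codec is configured under mapreduce.map.output.compress.codec (the CodecCheck class name is illustrative), would surface this before submitting; the run that follows instead switches to bzip2, a pure-Java codec that needs no native library:

    // Hypothetical preflight check: instantiate the configured codec and ask
    // for its compressor type. SnappyCodec throws the same RuntimeException
    // seen in the stack trace above when native snappy is unavailable.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.DefaultCodec;
    import org.apache.hadoop.util.ReflectionUtils;

    public class CodecCheck {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            Class<?> codecClass = conf.getClass(
                    "mapreduce.map.output.compress.codec", DefaultCodec.class);
            CompressionCodec codec =
                    (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
            codec.getCompressorType(); // throws if the native library is missing
            System.out.println("codec usable: " + codecClass.getName());
        }
    }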
+2017-02-18 09:36:46,307 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3 +2017-02-18 09:36:46,597 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3 +2017-02-18 09:36:47,633 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local2056867727_0001 +2017-02-18 09:36:49,105 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/ +2017-02-18 09:36:49,107 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local2056867727_0001 +2017-02-18 09:36:49,118 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null +2017-02-18 09:36:49,153 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:36:49,169 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter +2017-02-18 09:36:49,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks +2017-02-18 09:36:49,483 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:36:49,685 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:36:49,784 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:36:49,807 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935 +2017-02-18 09:36:50,167 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 running in uber mode : false +2017-02-18 09:36:50,176 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0% +2017-02-18 09:36:50,435 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:36:50,458 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:36:50,459 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:36:50,460 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:36:50,487 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:36:50,528 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:36:55,778 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:36:56,207 INFO org.apache.hadoop.mapreduce.Job: map 6% reduce 0% +2017-02-18 09:36:58,787 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:36:59,218 INFO org.apache.hadoop.mapreduce.Job: map 14% reduce 0% +2017-02-18 09:37:01,669 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600 +2017-02-18 09:37:01,672 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600 +2017-02-18 09:37:01,789 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:02,230 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0% +2017-02-18 09:37:04,793 INFO 
org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:07,797 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:10,807 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:13,808 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:16,815 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:17,664 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.bz2] +2017-02-18 09:37:19,819 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:22,831 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:23,735 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:23,769 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000000_0 is done. And is in the process of committing +2017-02-18 09:37:23,775 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000000_0' done. +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:23,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:23,787 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:23,789 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:23,793 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889 +2017-02-18 09:37:24,086 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:37:24,095 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:37:24,101 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:37:24,115 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:37:24,384 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:27,137 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:37:27,148 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600 +2017-02-18 09:37:27,149 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600 +2017-02-18 09:37:27,402 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0% +2017-02-18 09:37:29,799 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:30,413 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0% +2017-02-18 09:37:32,802 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:33,153 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:33,163 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000001_0 is 
done. And is in the process of committing +2017-02-18 09:37:33,168 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:33,171 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000001_0' done. +2017-02-18 09:37:33,172 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:33,173 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:33,179 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:33,180 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:33,187 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050 +2017-02-18 09:37:33,424 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:33,475 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584) +2017-02-18 09:37:33,481 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100 +2017-02-18 09:37:33,487 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080 +2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600 +2017-02-18 09:37:33,489 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600 +2017-02-18 09:37:33,497 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer +2017-02-18 09:37:33,499 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it +2017-02-18 09:37:35,611 INFO org.apache.hadoop.mapred.LocalJobRunner: +2017-02-18 09:37:35,620 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output +2017-02-18 09:37:35,621 INFO org.apache.hadoop.mapred.MapTask: Spilling map output +2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600 +2017-02-18 09:37:35,622 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600 +2017-02-18 09:37:36,442 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0% +2017-02-18 09:37:39,198 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort +2017-02-18 09:37:39,462 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0% +2017-02-18 09:37:40,255 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0 +2017-02-18 09:37:40,263 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_m_000002_0 is done. And is in the process of committing +2017-02-18 09:37:40,269 INFO org.apache.hadoop.mapred.LocalJobRunner: map +2017-02-18 09:37:40,272 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_m_000002_0' done. +2017-02-18 09:37:40,273 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:40,274 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete. 
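Three inputs, three splits, three sequential map tasks: each "Processing split" entry above covers a whole file (16,013,935; 5,589,889; 4,454,050 bytes), all far below the effective split size, so FileInputFormat's "number of splits:3" is simply one split per book, and LineRecordReader drops the UTF-8 BOM that each Project Gutenberg text begins with. A quick check of that arithmetic, with the 32 MB local-filesystem block size as the assumed split size (an assumption; on stock HDFS the figure would be 128 MB, which changes nothing here):

    // Illustration only: the file sizes are from the log; 32 MB is the default
    // local filesystem block size (fs.local.block.size), assumed here to be the
    // effective FileInputFormat split size for this local run.
    public class SplitCount {
        public static void main(String[] args) {
            long[] bytes = {16_013_935L, 5_589_889L, 4_454_050L}; // pg3200, pg100, pg31100
            long splitSize = 32L * 1024 * 1024; // 33,554,432 bytes
            long splits = 0;
            for (long b : bytes) {
                splits += (b + splitSize - 1) / splitSize; // ceiling division
            }
            System.out.println("number of splits:" + splits); // prints 3, as logged
        }
    }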
+2017-02-18 09:37:40,342 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks +2017-02-18 09:37:40,343 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000000_0 +2017-02-18 09:37:40,390 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:40,391 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:40,405 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7018394a +2017-02-18 09:37:40,468 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0% +2017-02-18 09:37:40,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:40,534 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:40,699 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2] +2017-02-18 09:37:40,703 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 61 len: 103 to MEMORY +2017-02-18 09:37:40,740 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 61 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:40,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 61, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->61 +2017-02-18 09:37:40,777 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 30 len: 78 to MEMORY +2017-02-18 09:37:40,785 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 30 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:40,789 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 30, inMemoryMapOutputs.size() -> 2, commitMemory -> 61, usedMemory ->91 +2017-02-18 09:37:40,794 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 82 to MEMORY +2017-02-18 09:37:40,800 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:40,814 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 91, usedMemory ->122 +2017-02-18 09:37:40,817 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:40,818 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:40,819 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:40,851 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:40,865 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 104 bytes +2017-02-18 09:37:40,909 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 122 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:40,921 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 153 bytes from disk +2017-02-18 09:37:40,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:40,931 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:40,935 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 110 bytes +2017-02-18 09:37:40,938 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:40,984 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords +2017-02-18 09:37:40,987 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000000_0 is done. And is in the process of committing +2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:40,988 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000000_0 is allowed to commit now +2017-02-18 09:37:40,989 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000000 +2017-02-18 09:37:41,004 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,008 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000000_0' done. 
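For each reducer the shuffle bookkeeping above can be read off directly: the three in-memory map outputs for r_000000 arrive as decomp: 61, 30 and 31 bytes (decompressed size; len is the larger on-wire bzip2 size), usedMemory grows 61 -> 91 -> 122 while commitMemory trails one fetch behind, and finalMerge then writes all 122 bytes to a single 153-byte compressed file on disk before the reduce runs. A toy reconstruction of that accounting (illustration only, not Hadoop's code; the byte values come from the log):

    // Toy model of the MergeManagerImpl counters printed above for r_000000.
    public class ShuffleAccounting {
        public static void main(String[] args) {
            long[] decomp = {61, 30, 31}; // map outputs m_000000..m_000002
            long commitMemory = 0, usedMemory = 0;
            for (long d : decomp) {
                usedMemory += d; // InMemoryMapOutput read into memory
                System.out.println("commitMemory -> " + commitMemory
                        + ", usedMemory ->" + usedMemory);
                commitMemory += d; // closeInMemoryFile commits the segment
            }
            // finalMerge: all 122 bytes are merged and spilled to disk.
        }
    }

The remaining nine reducers below repeat the same copy/merge/commit cycle with different byte counts.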
+2017-02-18 09:37:41,009 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000000_0 +2017-02-18 09:37:41,010 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000001_0 +2017-02-18 09:37:41,018 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,019 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,020 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@21fdb35f +2017-02-18 09:37:41,027 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,048 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,060 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 154 len: 171 to MEMORY +2017-02-18 09:37:41,065 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 154 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 154, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->154 +2017-02-18 09:37:41,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 69 len: 110 to MEMORY +2017-02-18 09:37:41,084 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 69 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,088 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 69, inMemoryMapOutputs.size() -> 2, commitMemory -> 154, usedMemory ->223 +2017-02-18 09:37:41,094 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 39 len: 85 to MEMORY +2017-02-18 09:37:41,100 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 39 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,114 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 3, commitMemory -> 223, usedMemory ->262 +2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,117 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,119 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 253 bytes +2017-02-18 09:37:41,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 262 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 238 bytes from disk +2017-02-18 09:37:41,151 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,155 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 255 bytes +2017-02-18 09:37:41,159 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,201 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000001_0 is done. And is in the process of committing +2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,204 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000001_0 is allowed to commit now +2017-02-18 09:37:41,208 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000001 +2017-02-18 09:37:41,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000001_0' done. 
+2017-02-18 09:37:41,216 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000001_0 +2017-02-18 09:37:41,217 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000002_0 +2017-02-18 09:37:41,237 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@16f5d08e +2017-02-18 09:37:41,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,259 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,268 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 112 len: 146 to MEMORY +2017-02-18 09:37:41,277 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 112 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,286 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 112, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->112 +2017-02-18 09:37:41,290 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 41 len: 81 to MEMORY +2017-02-18 09:37:41,299 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 41 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,303 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 41, inMemoryMapOutputs.size() -> 2, commitMemory -> 112, usedMemory ->153 +2017-02-18 09:37:41,306 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 51 len: 94 to MEMORY +2017-02-18 09:37:41,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 51 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 3, commitMemory -> 153, usedMemory ->204 +2017-02-18 09:37:41,319 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,321 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,322 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,323 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 186 bytes +2017-02-18 09:37:41,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 204 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,374 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 205 bytes from disk +2017-02-18 09:37:41,377 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,378 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,380 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 194 bytes +2017-02-18 09:37:41,385 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,423 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000002_0 is done. And is in the process of committing +2017-02-18 09:37:41,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000002_0 is allowed to commit now +2017-02-18 09:37:41,439 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000002 +2017-02-18 09:37:41,450 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,454 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000002_0' done. 
+2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000002_0 +2017-02-18 09:37:41,455 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000003_0 +2017-02-18 09:37:41,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,472 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 09:37:41,476 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7fae5c75 +2017-02-18 09:37:41,484 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,502 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 63 len: 105 to MEMORY +2017-02-18 09:37:41,515 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 63 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,520 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 63, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->63 +2017-02-18 09:37:41,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 57 to MEMORY +2017-02-18 09:37:41,531 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,537 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 63, usedMemory ->74 +2017-02-18 09:37:41,559 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 11 len: 59 to MEMORY +2017-02-18 09:37:41,565 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,573 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 74, usedMemory ->85 +2017-02-18 09:37:41,575 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,576 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,578 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes +2017-02-18 09:37:41,605 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 85 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,610 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk +2017-02-18 09:37:41,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,612 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,616 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 75 bytes +2017-02-18 09:37:41,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,670 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000003_0 is done. And is in the process of committing +2017-02-18 09:37:41,672 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:41,673 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000003_0 is allowed to commit now +2017-02-18 09:37:41,678 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000003 +2017-02-18 09:37:41,683 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:41,685 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000003_0' done. 
+2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000003_0 +2017-02-18 09:37:41,686 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000004_0 +2017-02-18 09:37:41,695 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:41,696 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2f5fea9c +2017-02-18 09:37:41,711 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:41,723 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:41,737 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 52 len: 101 to MEMORY +2017-02-18 09:37:41,753 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 52 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:41,756 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 52, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->52 +2017-02-18 09:37:41,760 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 33 len: 82 to MEMORY +2017-02-18 09:37:41,776 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:41,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 52, usedMemory ->85 +2017-02-18 09:37:41,784 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 31 len: 79 to MEMORY +2017-02-18 09:37:41,813 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 85, usedMemory ->116 +2017-02-18 09:37:41,820 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:41,821 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:41,907 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 100 bytes +2017-02-18 09:37:41,951 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 116 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:41,967 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 147 bytes from disk +2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:41,968 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:41,969 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 106 bytes +2017-02-18 09:37:41,976 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,014 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000004_0 is done. And is in the process of committing +2017-02-18 09:37:42,017 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,029 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000004_0 is allowed to commit now +2017-02-18 09:37:42,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000004 +2017-02-18 09:37:42,040 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000004_0' done. 
+2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000004_0 +2017-02-18 09:37:42,041 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000005_0 +2017-02-18 09:37:42,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,060 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,068 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@70b91162 +2017-02-18 09:37:42,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,107 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:42,117 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 54 len: 100 to MEMORY +2017-02-18 09:37:42,128 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 54 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,143 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 54, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->54 +2017-02-18 09:37:42,146 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 23 len: 69 to MEMORY +2017-02-18 09:37:42,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:42,166 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 54, usedMemory ->77 +2017-02-18 09:37:42,188 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 23 len: 68 to MEMORY +2017-02-18 09:37:42,194 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,195 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->100 +2017-02-18 09:37:42,203 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,204 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,206 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 82 bytes +2017-02-18 09:37:42,226 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 100 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,245 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 130 bytes from disk +2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,247 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,249 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 09:37:42,266 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,307 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000005_0 is done. And is in the process of committing +2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,313 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000005_0 is allowed to commit now +2017-02-18 09:37:42,314 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000005 +2017-02-18 09:37:42,326 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,349 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000005_0' done. 
+2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000005_0 +2017-02-18 09:37:42,350 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000006_0 +2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,358 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,359 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3d144d62 +2017-02-18 09:37:42,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,387 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:42,409 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 72 len: 119 to MEMORY +2017-02-18 09:37:42,415 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 72 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,418 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 72, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->72 +2017-02-18 09:37:42,422 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 28 len: 75 to MEMORY +2017-02-18 09:37:42,429 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:42,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 2, commitMemory -> 72, usedMemory ->100 +2017-02-18 09:37:42,445 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 28 len: 74 to MEMORY +2017-02-18 09:37:42,460 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 28 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,466 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 28, inMemoryMapOutputs.size() -> 3, commitMemory -> 100, usedMemory ->128 +2017-02-18 09:37:42,467 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,483 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 60% +2017-02-18 09:37:42,532 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,533 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 110 bytes +2017-02-18 09:37:42,582 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 128 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 155 bytes from disk +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,583 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,593 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 116 bytes +2017-02-18 09:37:42,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,656 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000006_0 is done. And is in the process of committing +2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,658 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000006_0 is allowed to commit now +2017-02-18 09:37:42,659 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000006 +2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,673 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000006_0' done. 
+2017-02-18 09:37:42,674 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000006_0 +2017-02-18 09:37:42,681 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000007_0 +2017-02-18 09:37:42,691 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,692 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,693 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@a82b79d +2017-02-18 09:37:42,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,726 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:42,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 88 len: 120 to MEMORY +2017-02-18 09:37:42,750 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 88 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:42,757 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 88, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->88 +2017-02-18 09:37:42,769 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 11 len: 55 to MEMORY +2017-02-18 09:37:42,771 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:42,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 88, usedMemory ->99 +2017-02-18 09:37:42,797 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:37:42,812 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:42,813 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 99, usedMemory ->101 +2017-02-18 09:37:42,824 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:42,825 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:42,826 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:42,827 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 87 bytes +2017-02-18 09:37:42,858 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 101 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 140 bytes from disk +2017-02-18 09:37:42,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:42,878 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:42,879 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 90 bytes +2017-02-18 09:37:42,888 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,929 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000007_0 is done. And is in the process of committing +2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:42,931 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000007_0 is allowed to commit now +2017-02-18 09:37:42,934 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000007 +2017-02-18 09:37:42,963 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000007_0' done. 
+2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000007_0 +2017-02-18 09:37:42,964 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000008_0 +2017-02-18 09:37:42,972 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:42,973 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@132393b4 +2017-02-18 09:37:42,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:42,996 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:43,008 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 98 len: 134 to MEMORY +2017-02-18 09:37:43,014 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 98 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:43,024 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 98, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->98 +2017-02-18 09:37:43,028 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 49 len: 92 to MEMORY +2017-02-18 09:37:43,043 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:43,044 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 2, commitMemory -> 98, usedMemory ->147 +2017-02-18 09:37:43,054 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 49 len: 97 to MEMORY +2017-02-18 09:37:43,067 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 49 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:43,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 49, inMemoryMapOutputs.size() -> 3, commitMemory -> 147, usedMemory ->196 +2017-02-18 09:37:43,082 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:43,083 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:37:43,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 184 bytes +2017-02-18 09:37:43,101 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 196 bytes to disk to satisfy reduce memory limit +2017-02-18 09:37:43,131 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 196 bytes from disk +2017-02-18 09:37:43,136 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:37:43,137 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:37:43,140 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 188 bytes +2017-02-18 09:37:43,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,191 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000008_0 is done. And is in the process of committing +2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:37:43,196 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000008_0 is allowed to commit now +2017-02-18 09:37:43,197 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000008 +2017-02-18 09:37:43,205 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:37:43,207 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000008_0' done. 
+2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000008_0 +2017-02-18 09:37:43,223 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local2056867727_0001_r_000009_0 +2017-02-18 09:37:43,232 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:37:43,233 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@54930708 +2017-02-18 09:37:43,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:37:43,270 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local2056867727_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:37:43,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000000_0 decomp: 22 len: 69 to MEMORY +2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local2056867727_0001_m_000000_0 +2017-02-18 09:37:43,310 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 09:37:43,329 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000001_0 decomp: 13 len: 58 to MEMORY +2017-02-18 09:37:43,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local2056867727_0001_m_000001_0 +2017-02-18 09:37:43,338 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->35 +2017-02-18 09:37:43,340 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local2056867727_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:37:43,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local2056867727_0001_m_000002_0 +2017-02-18 09:37:43,365 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37 +2017-02-18 09:37:43,368 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:37:43,370 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:37:43,371 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:37:43,372 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 22 bytes
+2017-02-18 09:37:43,379 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:37:43,412 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk
+2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:37:43,413 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:37:43,417 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 27 bytes
+2017-02-18 09:37:43,433 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:37:43,468 INFO org.apache.hadoop.mapred.Task: Task:attempt_local2056867727_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:37:43,471 INFO org.apache.hadoop.mapred.Task: Task attempt_local2056867727_0001_r_000009_0 is allowed to commit now
+2017-02-18 09:37:43,473 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local2056867727_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iii/_temporary/0/task_local2056867727_0001_r_000009
+2017-02-18 09:37:43,484 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local2056867727_0001_r_000009_0' done.
+2017-02-18 09:37:43,487 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local2056867727_0001_r_000009_0
+2017-02-18 09:37:43,488 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 09:37:43,493 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 09:37:43,633 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local2056867727_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 09:37:44,501 INFO org.apache.hadoop.mapreduce.Job: Job job_local2056867727_0001 failed with state FAILED due to: NA
+2017-02-18 09:37:44,719 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=324416757
+		FILE: Number of bytes written=3669220
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=2645
+		Input split bytes=351
+		Combine input records=4678719
+		Combine output records=131
+		Reduce input groups=77
+		Reduce shuffle bytes=2645
+		Reduce input records=131
+		Reduce output records=77
+		Spilled Records=262
+		Shuffled Maps =30
+		Failed Shuffles=0
+		Merged Map outputs=30
+		GC time elapsed (ms)=709
+		Total committed heap usage (bytes)=2551959552
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=862
+2017-02-18 09:48:17,921 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 09:48:18,238 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.compress.map.output is deprecated. Instead, use mapreduce.map.output.compress
+2017-02-18 09:48:18,273 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.map.output.compression.codec is deprecated. Instead, use mapreduce.map.output.compress.codec
+2017-02-18 09:48:19,566 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 09:48:19,568 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 09:48:21,035 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
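The failure above is a classpath problem, not a logic one: every map and reduce task commits, the job reaches map 100% reduce 100%, and only then does LocalJobRunner$Job.run throw NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest. That class lives in Apache HttpComponents httpclient, a hadoop-common dependency, so the jar was most likely missing from the run configuration's classpath; adding httpclient (and its httpcore companion) is the usual fix. The "No job jar file set" warning that opens the next run points at the related API, Job#setJar / Job#setJarByClass. A minimal driver sketch consistent with this log follows; Q1Driver is a placeholder name, and the mapper/combiner/reducer wiring (job.setMapperClass(...) etc.) is omitted because the log says nothing about it:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class Q1Driver {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Q1.iii");
            // Job#setJarByClass is the usual answer to the JobResourceUploader
            // warning; it only helps once the classes actually live in a jar.
            job.setJarByClass(Q1Driver.class);
            job.setNumReduceTasks(10); // matches reducers r_000000..r_000009 above
            FileInputFormat.addInputPath(job, new Path("pg3200.txt"));
            FileInputFormat.addInputPath(job, new Path("pg100.txt"));
            FileInputFormat.addInputPath(job, new Path("pg31100.txt"));
            FileOutputFormat.setOutputPath(job, new Path("output_Q1.iii"));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }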
+2017-02-18 09:48:21,098 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 09:48:21,373 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 09:48:22,374 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local1938545376_0001
+2017-02-18 09:48:23,753 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 09:48:23,754 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local1938545376_0001
+2017-02-18 09:48:23,764 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 09:48:23,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:23,806 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 09:48:24,124 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 09:48:24,127 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:48:24,295 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:24,379 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:48:24,385 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 09:48:24,758 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 running in uber mode : false
+2017-02-18 09:48:24,761 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 09:48:25,010 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:48:25,031 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:48:25,032 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:48:25,033 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:48:25,066 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:48:25,088 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:48:30,384 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:30,850 INFO org.apache.hadoop.mapreduce.Job: map 7% reduce 0%
+2017-02-18 09:48:33,397 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:33,876 INFO org.apache.hadoop.mapreduce.Job: map 15% reduce 0%
+2017-02-18 09:48:36,121 INFO org.apache.hadoop.mapred.LocalJobRunner: map > map
+2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:48:36,123 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 26925530; bufvoid = 104857600
+2017-02-18 09:48:36,124 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 14765624(59062496); length = 11448773/6553600
+2017-02-18 09:48:36,409 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:36,888 INFO org.apache.hadoop.mapreduce.Job: map 22% reduce 0%
+2017-02-18 09:48:39,417 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:42,419 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:45,426 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:48,390 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new compressor [.bz2]
+2017-02-18 09:48:48,430 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:51,436 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:48:54,810 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:48:54,835 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000000_0 is done. And is in the process of committing
+2017-02-18 09:48:54,842 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:48:54,846 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000000_0' done.
+2017-02-18 09:48:54,847 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:48:54,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:48:54,857 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:48:54,858 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:48:54,865 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 09:48:55,055 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:48:55,162 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:48:55,164 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:48:55,165 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:48:55,172 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:48:55,185 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:48:58,333 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:48:58,341 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 9171648; bufvoid = 104857600
+2017-02-18 09:48:58,342 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 22120624(88482496); length = 4093773/6553600
+2017-02-18 09:48:59,085 INFO org.apache.hadoop.mapreduce.Job: map 33% reduce 0%
+2017-02-18 09:49:00,871 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:01,095 INFO org.apache.hadoop.mapreduce.Job: map 56% reduce 0%
+2017-02-18 09:49:03,873 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:04,061 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:49:04,076 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000001_0 is done. And is in the process of committing
+2017-02-18 09:49:04,081 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000001_0' done.
+2017-02-18 09:49:04,085 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:04,086 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:04,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:04,094 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:04,096 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 09:49:04,290 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:49:04,384 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 09:49:04,405 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 09:49:04,406 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 09:49:04,407 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 09:49:04,414 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 09:49:04,416 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 09:49:06,527 INFO org.apache.hadoop.mapred.LocalJobRunner:
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: Spilling map output
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufend = 7541511; bufvoid = 104857600
+2017-02-18 09:49:06,532 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396(104857584); kvend = 23042076(92168304); length = 3172321/6553600
+2017-02-18 09:49:07,315 INFO org.apache.hadoop.mapreduce.Job: map 67% reduce 0%
+2017-02-18 09:49:10,103 INFO org.apache.hadoop.mapred.LocalJobRunner: map > sort
+2017-02-18 09:49:10,335 INFO org.apache.hadoop.mapreduce.Job: map 89% reduce 0%
+2017-02-18 09:49:11,103 INFO org.apache.hadoop.mapred.MapTask: Finished spill 0
+2017-02-18 09:49:11,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_m_000002_0 is done. And is in the process of committing
+2017-02-18 09:49:11,123 INFO org.apache.hadoop.mapred.LocalJobRunner: map
+2017-02-18 09:49:11,124 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_m_000002_0' done.
+2017-02-18 09:49:11,125 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,126 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for reduce tasks
+2017-02-18 09:49:11,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000000_0
+2017-02-18 09:49:11,269 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:11,270 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:11,288 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@639e075e
+2017-02-18 09:49:11,337 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 0%
+2017-02-18 09:49:11,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:11,397 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:11,571 INFO org.apache.hadoop.io.compress.CodecPool: Got brand-new decompressor [.bz2]
+2017-02-18 09:49:11,575 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 70 to MEMORY
+2017-02-18 09:49:11,608 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:11,619 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
+2017-02-18 09:49:11,633 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:11,642 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:11,646 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
+2017-02-18 09:49:11,651 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#1 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:11,658 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,662 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
+2017-02-18 09:49:11,664 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:11,692 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:11,701 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:11,728 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:11,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 95 bytes from disk
+2017-02-18 09:49:11,738 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:11,741 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:11,746 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
+2017-02-18 09:49:11,748 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,796 INFO org.apache.hadoop.conf.Configuration.deprecation: mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
+2017-02-18 09:49:11,799 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000000_0 is done. And is in the process of committing
+2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,800 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000000_0 is allowed to commit now
+2017-02-18 09:49:11,801 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000000_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000000
+2017-02-18 09:49:11,811 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:11,813 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000000_0' done.
+2017-02-18 09:49:11,814 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000000_0
+2017-02-18 09:49:11,821 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000001_0
+2017-02-18 09:49:11,832 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:11,833 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:11,834 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@362f58d7
+2017-02-18 09:49:11,840 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:11,854 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000001_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:11,863 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 29 len: 79 to MEMORY
+2017-02-18 09:49:11,869 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 29 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:11,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 29, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->29
+2017-02-18 09:49:11,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 9 len: 53 to MEMORY
+2017-02-18 09:49:11,897 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:11,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 2, commitMemory -> 29, usedMemory ->38
+2017-02-18 09:49:11,905 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#2 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 9 len: 49 to MEMORY
+2017-02-18 09:49:11,919 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 9 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:11,924 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 9, inMemoryMapOutputs.size() -> 3, commitMemory -> 38, usedMemory ->47
+2017-02-18 09:49:11,927 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:11,928 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:11,929 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:11,930 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
+2017-02-18 09:49:12,000 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,007 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
+2017-02-18 09:49:12,009 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,010 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,011 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 40 bytes
+2017-02-18 09:49:12,016 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,051 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000001_0 is done. And is in the process of committing
+2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,053 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000001_0 is allowed to commit now
+2017-02-18 09:49:12,059 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000001_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000001
+2017-02-18 09:49:12,083 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,096 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000001_0' done.
+2017-02-18 09:49:12,097 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000001_0
+2017-02-18 09:49:12,098 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000002_0
+2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,108 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,109 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6b121f65
+2017-02-18 09:49:12,115 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,134 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000002_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,162 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,164 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:12,168 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,176 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,181 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:12,183 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#3 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,191 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,196 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:12,198 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,199 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,203 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:12,210 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,242 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:12,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,245 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,246 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:12,250 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,295 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000002_0 is done. And is in the process of committing
+2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,298 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000002_0 is allowed to commit now
+2017-02-18 09:49:12,299 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000002_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000002
+2017-02-18 09:49:12,304 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000002_0' done.
+2017-02-18 09:49:12,307 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000002_0
+2017-02-18 09:49:12,308 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000003_0
+2017-02-18 09:49:12,318 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,319 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@533f785b
+2017-02-18 09:49:12,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,344 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 09:49:12,350 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000003_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,361 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY
+2017-02-18 09:49:12,364 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,368 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:12,378 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,388 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,397 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:12,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#4 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:12,412 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,419 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:12,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,422 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,424 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,425 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:12,459 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,477 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk
+2017-02-18 09:49:12,479 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,480 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,492 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:12,501 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,547 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000003_0 is done. And is in the process of committing
+2017-02-18 09:49:12,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,550 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000003_0 is allowed to commit now
+2017-02-18 09:49:12,552 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000003_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000003
+2017-02-18 09:49:12,557 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,560 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000003_0' done.
+2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000003_0
+2017-02-18 09:49:12,561 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000004_0
+2017-02-18 09:49:12,575 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,577 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,579 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@51d934bf
+2017-02-18 09:49:12,586 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,599 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000004_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,616 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 56 to MEMORY
+2017-02-18 09:49:12,621 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,625 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:12,628 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:12,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:12,641 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#5 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:12,648 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:12,656 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,666 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,668 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,670 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:12,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,705 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk
+2017-02-18 09:49:12,706 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,707 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,708 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:12,712 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,744 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000004_0 is done. And is in the process of committing
+2017-02-18 09:49:12,747 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,756 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000004_0 is allowed to commit now
+2017-02-18 09:49:12,757 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000004_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000004
+2017-02-18 09:49:12,763 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,766 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000004_0' done.
+2017-02-18 09:49:12,767 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000004_0
+2017-02-18 09:49:12,770 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000005_0
+2017-02-18 09:49:12,777 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:12,778 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:12,785 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1af653a6
+2017-02-18 09:49:12,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:12,811 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000005_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:12,821 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 81 to MEMORY
+2017-02-18 09:49:12,826 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:12,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
+2017-02-18 09:49:12,832 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 23 len: 69 to MEMORY
+2017-02-18 09:49:12,850 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:12,856 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->56
+2017-02-18 09:49:12,868 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#6 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 23 len: 68 to MEMORY
+2017-02-18 09:49:12,875 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:12,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 3, commitMemory -> 56, usedMemory ->79
+2017-02-18 09:49:12,881 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,882 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:12,884 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 61 bytes
+2017-02-18 09:49:12,911 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 79 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:12,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk
+2017-02-18 09:49:12,927 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:12,928 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:12,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 69 bytes
+2017-02-18 09:49:12,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,979 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000005_0 is done. And is in the process of committing
+2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:12,981 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000005_0 is allowed to commit now
+2017-02-18 09:49:12,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000005_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000005
+2017-02-18 09:49:12,987 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:12,989 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000005_0' done.
+2017-02-18 09:49:12,990 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000005_0
+2017-02-18 09:49:12,991 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000006_0
+2017-02-18 09:49:13,000 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,001 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@685461f1
+2017-02-18 09:49:13,014 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,027 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000006_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:13,056 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,074 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,077 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:13,079 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#7 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,085 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,098 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,100 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,102 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,149 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,150 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,151 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000006_0 is done. And is in the process of committing
+2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000006_0 is allowed to commit now
+2017-02-18 09:49:13,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000006_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000006
+2017-02-18 09:49:13,217 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,221 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000006_0' done.
+2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000006_0
+2017-02-18 09:49:13,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000007_0
+2017-02-18 09:49:13,234 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,235 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,238 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5f132bfb
+2017-02-18 09:49:13,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,260 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000007_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,270 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 33 len: 79 to MEMORY
+2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,296 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->33
+2017-02-18 09:49:13,298 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,313 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,314 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 33, usedMemory ->35
+2017-02-18 09:49:13,327 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#8 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,330 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->37
+2017-02-18 09:49:13,339 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,340 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,342 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,343 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
+2017-02-18 09:49:13,352 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 14%
+2017-02-18 09:49:13,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 37 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 93 bytes from disk
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,410 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,411 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes
+2017-02-18 09:49:13,412 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,458 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000007_0 is done. And is in the process of committing
+2017-02-18 09:49:13,459 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,460 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000007_0 is allowed to commit now
+2017-02-18 09:49:13,466 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000007_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000007
+2017-02-18 09:49:13,474 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,478 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000007_0' done.
+2017-02-18 09:49:13,479 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000007_0
+2017-02-18 09:49:13,480 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000008_0
+2017-02-18 09:49:13,487 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,488 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,496 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@19be1bc9
+2017-02-18 09:49:13,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,527 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000008_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,549 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 73 to MEMORY
+2017-02-18 09:49:13,551 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,561 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:13,563 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 59 to MEMORY
+2017-02-18 09:49:13,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,583 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->35
+2017-02-18 09:49:13,588 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#9 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 59 to MEMORY
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 35, usedMemory ->47
+2017-02-18 09:49:13,606 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,607 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,609 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 29 bytes
+2017-02-18 09:49:13,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 47 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,649 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 101 bytes from disk
+2017-02-18 09:49:13,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,651 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,656 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes
+2017-02-18 09:49:13,659 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,705 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000008_0 is done. And is in the process of committing
+2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,707 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000008_0 is allowed to commit now
+2017-02-18 09:49:13,708 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000008_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000008
+2017-02-18 09:49:13,715 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,722 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000008_0' done.
+2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000008_0
+2017-02-18 09:49:13,723 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000009_0
+2017-02-18 09:49:13,739 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:13,740 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:13,744 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f9623fe
+2017-02-18 09:49:13,781 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:13,801 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000009_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:13,812 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,816 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:13,828 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:13,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,832 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:13,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:13,858 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#10 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:13,867 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:13,875 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:13,880 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,881 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:13,883 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,890 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:13,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:13,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:13,913 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:13,924 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:13,935 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,976 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000009_0 is done. And is in the process of committing
+2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:13,977 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000009_0 is allowed to commit now
+2017-02-18 09:49:13,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000009_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000009
+2017-02-18 09:49:13,990 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:13,996 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000009_0' done.
+2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000009_0
+2017-02-18 09:49:13,997 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000010_0
+2017-02-18 09:49:14,014 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:14,015 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:14,025 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2926cd3a
+2017-02-18 09:49:14,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:14,046 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000010_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:14,055 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,061 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:14,066 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:14,069 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:14,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:14,098 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#11 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:14,106 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:14,110 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:14,112 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:14,113 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:14,116 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:14,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:14,150 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk +2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:14,152 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:14,153 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:14,154 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,186 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000010_0 is done. And is in the process of committing +2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,188 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000010_0 is allowed to commit now +2017-02-18 09:49:14,189 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000010_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000010 +2017-02-18 09:49:14,191 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:14,194 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000010_0' done. 
+2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000010_0 +2017-02-18 09:49:14,195 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000011_0 +2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:14,202 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:14,203 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@38ea76e6 +2017-02-18 09:49:14,219 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:14,232 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000011_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:14,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 92 to MEMORY +2017-02-18 09:49:14,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:14,256 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44 +2017-02-18 09:49:14,261 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 33 len: 81 to MEMORY +2017-02-18 09:49:14,268 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 33 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:14,271 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 33, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->77 +2017-02-18 09:49:14,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#12 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 58 to MEMORY +2017-02-18 09:49:14,297 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:14,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 77, usedMemory ->90 +2017-02-18 09:49:14,303 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:14,305 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:14,306 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 69 bytes +2017-02-18 09:49:14,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 90 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:14,337 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 128 bytes from disk +2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:14,339 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:14,341 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 79 bytes +2017-02-18 09:49:14,351 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,354 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 22% +2017-02-18 09:49:14,388 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000011_0 is done. And is in the process of committing +2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,390 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000011_0 is allowed to commit now +2017-02-18 09:49:14,391 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000011_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000011 +2017-02-18 09:49:14,395 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:14,397 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000011_0' done. 
+2017-02-18 09:49:14,398 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000011_0 +2017-02-18 09:49:14,399 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000012_0 +2017-02-18 09:49:14,431 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:14,432 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:14,447 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3038859d +2017-02-18 09:49:14,461 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:14,475 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000012_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:14,491 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 80 to MEMORY +2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:14,506 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32 +2017-02-18 09:49:14,531 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 65 to MEMORY +2017-02-18 09:49:14,540 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:14,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->54 +2017-02-18 09:49:14,548 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#13 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 60 to MEMORY +2017-02-18 09:49:14,559 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:14,564 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->66 +2017-02-18 09:49:14,568 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:14,569 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:14,570 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 48 bytes +2017-02-18 09:49:14,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 66 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:14,602 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 110 bytes from disk +2017-02-18 09:49:14,603 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:14,604 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:14,605 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 56 bytes +2017-02-18 09:49:14,609 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,648 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000012_0 is done. And is in the process of committing +2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,650 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000012_0 is allowed to commit now +2017-02-18 09:49:14,651 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000012_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000012 +2017-02-18 09:49:14,656 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000012_0' done. 
+2017-02-18 09:49:14,659 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000012_0 +2017-02-18 09:49:14,660 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000013_0 +2017-02-18 09:49:14,668 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:14,669 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:14,670 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1498c437 +2017-02-18 09:49:14,677 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:14,698 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000013_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:14,709 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:14,715 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:14,722 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:14,727 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:14,733 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:14,737 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:14,740 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#14 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:14,747 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:14,750 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:14,752 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:14,753 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:14,775 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:14,782 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:14,818 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:14,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk +2017-02-18 09:49:14,831 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:14,832 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:14,833 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:14,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,869 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000013_0 is done. And is in the process of committing +2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:14,870 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000013_0 is allowed to commit now +2017-02-18 09:49:14,871 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000013_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000013 +2017-02-18 09:49:14,873 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:14,885 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000013_0' done. 
+2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000013_0 +2017-02-18 09:49:14,887 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000014_0 +2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:14,893 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:14,894 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@37250eeb +2017-02-18 09:49:14,903 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:14,920 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000014_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:14,934 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 56 to MEMORY +2017-02-18 09:49:14,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:14,952 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12 +2017-02-18 09:49:14,971 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:14,995 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14 +2017-02-18 09:49:14,997 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#15 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16 +2017-02-18 09:49:15,021 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:15,022 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:15,024 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:15,025 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:15,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:15,074 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 70 bytes from disk +2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:15,075 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:15,081 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:15,082 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,115 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000014_0 is done. And is in the process of committing +2017-02-18 09:49:15,118 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,119 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000014_0 is allowed to commit now +2017-02-18 09:49:15,120 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000014_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000014 +2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000014_0' done. 
+2017-02-18 09:49:15,129 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000014_0 +2017-02-18 09:49:15,137 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000015_0 +2017-02-18 09:49:15,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:15,149 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:15,150 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@42005598 +2017-02-18 09:49:15,178 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:15,189 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000015_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:15,197 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,203 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:15,207 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:15,210 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,228 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:15,239 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:15,241 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#16 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,255 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:15,262 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:15,263 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:15,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:15,265 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:15,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:15,324 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:15,325 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk +2017-02-18 09:49:15,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:15,335 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:15,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:15,347 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,356 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 30% +2017-02-18 09:49:15,382 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000015_0 is done. And is in the process of committing +2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,384 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000015_0 is allowed to commit now +2017-02-18 09:49:15,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000015_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000015 +2017-02-18 09:49:15,388 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:15,390 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000015_0' done. 
+2017-02-18 09:49:15,391 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000015_0 +2017-02-18 09:49:15,392 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000016_0 +2017-02-18 09:49:15,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:15,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:15,424 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@562962ff +2017-02-18 09:49:15,431 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:15,445 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000016_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:15,448 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 75 to MEMORY +2017-02-18 09:49:15,458 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:15,463 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23 +2017-02-18 09:49:15,489 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,491 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:15,495 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25 +2017-02-18 09:49:15,498 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#17 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,504 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:15,508 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27 +2017-02-18 09:49:15,509 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:15,510 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:15,513 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes +2017-02-18 09:49:15,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:15,568 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 89 bytes from disk +2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:15,570 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:15,571 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes +2017-02-18 09:49:15,578 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,620 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000016_0 is done. And is in the process of committing +2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,622 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000016_0 is allowed to commit now +2017-02-18 09:49:15,626 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000016_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000016 +2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:15,635 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000016_0' done. 
+2017-02-18 09:49:15,636 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000016_0 +2017-02-18 09:49:15,646 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000017_0 +2017-02-18 09:49:15,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:15,653 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7a960b08 +2017-02-18 09:49:15,660 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:15,670 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000017_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:15,690 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 58 to MEMORY +2017-02-18 09:49:15,706 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:15,710 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13 +2017-02-18 09:49:15,713 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,720 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:15,723 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15 +2017-02-18 09:49:15,726 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#18 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:15,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17 +2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:15,746 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:15,747 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:15,748 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:15,758 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:15,782 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:15,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk +2017-02-18 09:49:15,797 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:15,798 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:15,799 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:15,803 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,828 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000017_0 is done. And is in the process of committing +2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:15,837 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000017_0 is allowed to commit now +2017-02-18 09:49:15,838 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000017_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000017 +2017-02-18 09:49:15,845 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:15,847 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000017_0' done. 
+2017-02-18 09:49:15,848 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000017_0 +2017-02-18 09:49:15,854 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000018_0 +2017-02-18 09:49:15,860 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:15,861 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1a08ec1c +2017-02-18 09:49:15,866 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:15,882 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000018_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:15,890 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 40 len: 94 to MEMORY +2017-02-18 09:49:15,906 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 40 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:15,910 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 40, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->40 +2017-02-18 09:49:15,913 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 31 len: 77 to MEMORY +2017-02-18 09:49:15,923 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:15,925 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 2, commitMemory -> 40, usedMemory ->71 +2017-02-18 09:49:15,927 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#19 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 31 len: 80 to MEMORY +2017-02-18 09:49:15,940 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:15,945 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 3, commitMemory -> 71, usedMemory ->102 +2017-02-18 09:49:15,947 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:15,948 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:15,949 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 84 bytes +2017-02-18 09:49:15,978 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 102 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:15,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 137 bytes from disk +2017-02-18 09:49:15,992 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:15,993 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:15,994 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 92 bytes +2017-02-18 09:49:15,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,027 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000018_0 is done. And is in the process of committing +2017-02-18 09:49:16,029 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000018_0 is allowed to commit now +2017-02-18 09:49:16,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000018_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000018 +2017-02-18 09:49:16,041 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:16,043 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000018_0' done. 
+2017-02-18 09:49:16,044 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000018_0 +2017-02-18 09:49:16,045 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000019_0 +2017-02-18 09:49:16,051 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:16,052 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:16,053 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cac98e5 +2017-02-18 09:49:16,060 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:16,074 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000019_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:16,082 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,097 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:16,099 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:16,104 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,114 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:16,124 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:16,128 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#20 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,134 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:16,138 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:16,140 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:16,141 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:16,142 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:16,143 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:16,145 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:16,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:16,183 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk +2017-02-18 09:49:16,185 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:16,186 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:16,187 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:16,188 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,221 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000019_0 is done. And is in the process of committing +2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,222 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000019_0 is allowed to commit now +2017-02-18 09:49:16,223 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000019_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000019 +2017-02-18 09:49:16,227 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000019_0' done. 
+2017-02-18 09:49:16,230 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000019_0 +2017-02-18 09:49:16,231 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000020_0 +2017-02-18 09:49:16,244 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:16,245 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@29a3caf2 +2017-02-18 09:49:16,251 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:16,267 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000020_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:16,273 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 74 to MEMORY +2017-02-18 09:49:16,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:16,288 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23 +2017-02-18 09:49:16,296 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY +2017-02-18 09:49:16,300 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:16,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->34 +2017-02-18 09:49:16,314 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#21 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:16,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->36 +2017-02-18 09:49:16,334 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:16,335 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:16,337 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 21 bytes +2017-02-18 09:49:16,361 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 36 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:16,365 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 40% +2017-02-18 09:49:16,373 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 96 bytes from disk +2017-02-18 09:49:16,375 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:16,377 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:16,379 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes +2017-02-18 09:49:16,382 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,407 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000020_0 is done. And is in the process of committing +2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:16,409 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000020_0 is allowed to commit now +2017-02-18 09:49:16,410 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000020_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000020 +2017-02-18 09:49:16,413 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:16,415 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000020_0' done. 
+2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000020_0 +2017-02-18 09:49:16,416 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000021_0 +2017-02-18 09:49:16,422 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:16,423 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@610f3309 +2017-02-18 09:49:16,441 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:16,456 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000021_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:16,465 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,482 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:16,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:16,508 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:16,526 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:16,528 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#22 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:16,547 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:16,548 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:16,549 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,550 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,558 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,591 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:16,593 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,597 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,599 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:16,603 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,634 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000021_0 is done. And is in the process of committing
+2017-02-18 09:49:16,635 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,636 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000021_0 is allowed to commit now
+2017-02-18 09:49:16,652 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000021_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000021
+2017-02-18 09:49:16,660 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000021_0' done.
+2017-02-18 09:49:16,661 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000021_0
+2017-02-18 09:49:16,665 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000022_0
+2017-02-18 09:49:16,677 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,678 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,685 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1906c58a
+2017-02-18 09:49:16,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,705 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000022_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,722 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 32 len: 79 to MEMORY
+2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 32 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,739 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 32, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->32
+2017-02-18 09:49:16,741 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,761 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,770 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 32, usedMemory ->34
+2017-02-18 09:49:16,786 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#23 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 12 len: 57 to MEMORY
+2017-02-18 09:49:16,788 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->46
+2017-02-18 09:49:16,794 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,795 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:16,798 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 33 bytes
+2017-02-18 09:49:16,834 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 46 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:16,835 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 99 bytes from disk
+2017-02-18 09:49:16,843 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:16,847 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:16,849 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 37 bytes
+2017-02-18 09:49:16,852 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,880 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000022_0 is done. And is in the process of committing
+2017-02-18 09:49:16,883 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,890 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000022_0 is allowed to commit now
+2017-02-18 09:49:16,894 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000022_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000022
+2017-02-18 09:49:16,899 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:16,902 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000022_0' done.
+2017-02-18 09:49:16,904 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000022_0
+2017-02-18 09:49:16,905 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000023_0
+2017-02-18 09:49:16,914 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:16,915 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@40151a53
+2017-02-18 09:49:16,926 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:16,939 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000023_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:16,945 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 70 to MEMORY
+2017-02-18 09:49:16,951 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:16,954 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:16,974 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,982 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:16,983 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:16,985 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#24 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:16,991 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:16,996 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:16,998 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:16,999 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,002 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:17,045 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,047 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk
+2017-02-18 09:49:17,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,051 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,053 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes
+2017-02-18 09:49:17,060 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,090 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000023_0 is done. And is in the process of committing
+2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,092 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000023_0 is allowed to commit now
+2017-02-18 09:49:17,093 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000023_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000023
+2017-02-18 09:49:17,097 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000023_0' done.
+2017-02-18 09:49:17,100 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000023_0
+2017-02-18 09:49:17,101 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000024_0
+2017-02-18 09:49:17,116 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,117 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@9fc1ec5
+2017-02-18 09:49:17,122 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,137 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000024_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,143 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,149 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,152 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:17,159 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,170 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,174 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:17,178 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#25 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,188 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,211 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:17,212 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,215 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,216 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,223 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,244 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,246 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,248 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,299 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000024_0 is done. And is in the process of committing
+2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,301 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000024_0 is allowed to commit now
+2017-02-18 09:49:17,302 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000024_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000024
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000024_0' done.
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000024_0
+2017-02-18 09:49:17,303 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000025_0
+2017-02-18 09:49:17,322 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,323 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,324 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2793d5d7
+2017-02-18 09:49:17,333 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,344 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000025_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,350 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,363 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,367 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:17,370 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,374 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100%
+2017-02-18 09:49:17,377 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,381 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:17,392 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#26 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,395 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,399 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:17,401 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,402 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,403 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,406 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,429 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,446 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:17,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,449 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,472 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:17,473 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,498 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000025_0 is done. And is in the process of committing
+2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,500 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000025_0 is allowed to commit now
+2017-02-18 09:49:17,518 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000025_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000025
+2017-02-18 09:49:17,520 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000025_0' done.
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000025_0
+2017-02-18 09:49:17,524 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000026_0
+2017-02-18 09:49:17,535 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,536 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,537 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5413556b
+2017-02-18 09:49:17,545 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,560 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000026_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,567 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,572 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,577 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:17,580 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,586 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,589 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:17,592 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#27 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:17,597 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:17,610 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,611 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,613 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:17,633 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,650 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk
+2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,652 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,653 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:17,657 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,693 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000026_0 is done. And is in the process of committing
+2017-02-18 09:49:17,696 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,709 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000026_0 is allowed to commit now
+2017-02-18 09:49:17,710 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000026_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000026
+2017-02-18 09:49:17,711 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,719 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000026_0' done.
+2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000026_0
+2017-02-18 09:49:17,720 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000027_0
+2017-02-18 09:49:17,729 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,730 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@cee97a9
+2017-02-18 09:49:17,745 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,765 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000027_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,771 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 60 to MEMORY
+2017-02-18 09:49:17,781 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:17,786 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14
+2017-02-18 09:49:17,788 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,807 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:17,808 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:17,810 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#28 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:17,827 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18
+2017-02-18 09:49:17,828 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,829 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:17,830 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:17,838 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:17,859 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:17,878 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
+2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:17,880 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:17,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:17,891 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,941 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000027_0 is done. And is in the process of committing
+2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:17,942 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000027_0 is allowed to commit now
+2017-02-18 09:49:17,943 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000027_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000027
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000027_0' done.
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000027_0
+2017-02-18 09:49:17,949 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000028_0
+2017-02-18 09:49:17,960 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:17,961 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@760bfc86
+2017-02-18 09:49:17,973 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:17,990 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000028_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:17,999 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 19 len: 64 to MEMORY
+2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,010 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->19
+2017-02-18 09:49:18,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 10 len: 54 to MEMORY
+2017-02-18 09:49:18,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,050 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 2, commitMemory -> 19, usedMemory ->29
+2017-02-18 09:49:18,053 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#29 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 10 len: 55 to MEMORY
+2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 10 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,071 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 10, inMemoryMapOutputs.size() -> 3, commitMemory -> 29, usedMemory ->39
+2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,072 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,073 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,075 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,076 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:18,125 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 39 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 90 bytes from disk
+2017-02-18 09:49:18,132 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,133 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,175 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 31 bytes
+2017-02-18 09:49:18,176 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,204 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000028_0 is done. And is in the process of committing
+2017-02-18 09:49:18,213 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,218 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000028_0 is allowed to commit now
+2017-02-18 09:49:18,219 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000028_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000028
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000028_0' done.
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000028_0
+2017-02-18 09:49:18,222 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000029_0
+2017-02-18 09:49:18,230 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,235 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,246 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6cf7a61b
+2017-02-18 09:49:18,260 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,283 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000029_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,287 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:18,292 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,304 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:18,312 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 13 len: 58 to MEMORY
+2017-02-18 09:49:18,315 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,318 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->15
+2017-02-18 09:49:18,323 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#30 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:18,327 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,329 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
+2017-02-18 09:49:18,331 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,335 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,344 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:18,351 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,380 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 58%
+2017-02-18 09:49:18,382 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 72 bytes from disk
+2017-02-18 09:49:18,385 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,387 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,388 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:18,394 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,432 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000029_0 is done. And is in the process of committing
+2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,434 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000029_0 is allowed to commit now
+2017-02-18 09:49:18,435 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000029_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000029
+2017-02-18 09:49:18,438 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,443 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000029_0' done.
+2017-02-18 09:49:18,444 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000029_0
+2017-02-18 09:49:18,449 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000030_0
+2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,455 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,456 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6f37d9b5
+2017-02-18 09:49:18,460 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,480 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000030_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,492 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY
+2017-02-18 09:49:18,495 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,499 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22
+2017-02-18 09:49:18,502 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 60 to MEMORY
+2017-02-18 09:49:18,508 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,511 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->34
+2017-02-18 09:49:18,514 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#31 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 70 to MEMORY
+2017-02-18 09:49:18,519 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 34, usedMemory ->56
+2017-02-18 09:49:18,524 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,525 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,527 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 38 bytes
+2017-02-18 09:49:18,574 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 56 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,579 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 104 bytes from disk
+2017-02-18 09:49:18,580 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,581 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,582 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 46 bytes
+2017-02-18 09:49:18,586 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,619 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000030_0 is done. And is in the process of committing
+2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,621 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000030_0 is allowed to commit now
+2017-02-18 09:49:18,623 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000030_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000030
+2017-02-18 09:49:18,626 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000030_0' done.
+2017-02-18 09:49:18,629 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000030_0
+2017-02-18 09:49:18,630 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000031_0
+2017-02-18 09:49:18,642 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,644 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d2ab9ab
+2017-02-18 09:49:18,651 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,665 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000031_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,672 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 43 len: 94 to MEMORY
+2017-02-18 09:49:18,678 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 43 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,681 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 43, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->43
+2017-02-18 09:49:18,684 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:18,690 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,701 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 43, usedMemory ->54
+2017-02-18 09:49:18,708 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#32 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 21 len: 70 to MEMORY
+2017-02-18 09:49:18,712 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 21 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,716 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 21, inMemoryMapOutputs.size() -> 3, commitMemory -> 54, usedMemory ->75
+2017-02-18 09:49:18,718 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,720 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,721 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 59 bytes
+2017-02-18 09:49:18,765 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 75 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,767 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 124 bytes from disk
+2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,769 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,771 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 65 bytes
+2017-02-18 09:49:18,775 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,811 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000031_0 is done. And is in the process of committing
+2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,816 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000031_0 is allowed to commit now
+2017-02-18 09:49:18,817 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000031_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000031
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000031_0' done.
+2017-02-18 09:49:18,830 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000031_0
+2017-02-18 09:49:18,831 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000032_0
+2017-02-18 09:49:18,840 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:18,842 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:18,843 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3fb90b1b
+2017-02-18 09:49:18,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:18,869 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000032_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:18,876 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 20 len: 66 to MEMORY
+2017-02-18 09:49:18,882 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 20 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:18,885 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 20, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->20
+2017-02-18 09:49:18,898 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:18,901 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:18,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 20, usedMemory ->31
+2017-02-18 09:49:18,915 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#33 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:18,922 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:18,929 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 31, usedMemory ->42
+2017-02-18 09:49:18,932 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:18,940 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:18,941 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 27 bytes
+2017-02-18 09:49:18,981 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 42 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:18,985 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk
+2017-02-18 09:49:18,987 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:18,988 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:18,989 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 33 bytes
+2017-02-18 09:49:18,993 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,028 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000032_0 is done. And is in the process of committing
+2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,030 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000032_0 is allowed to commit now
+2017-02-18 09:49:19,031 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000032_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000032
+2017-02-18 09:49:19,036 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000032_0' done.
+2017-02-18 09:49:19,038 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000032_0
+2017-02-18 09:49:19,039 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000033_0
+2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,046 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,047 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@44aa3f1a
+2017-02-18 09:49:19,053 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,069 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000033_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,075 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,081 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:19,087 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:19,094 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,097 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->22
+2017-02-18 09:49:19,099 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#34 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,115 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,118 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 22, usedMemory ->33
+2017-02-18 09:49:19,125 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,126 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,127 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 18 bytes
+2017-02-18 09:49:19,155 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 33 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,158 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 82 bytes from disk
+2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,160 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,161 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 24 bytes
+2017-02-18 09:49:19,165 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,208 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000033_0 is done. And is in the process of committing
+2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,210 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000033_0 is allowed to commit now
+2017-02-18 09:49:19,211 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000033_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000033
+2017-02-18 09:49:19,215 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,218 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000033_0' done.
+2017-02-18 09:49:19,219 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000033_0
+2017-02-18 09:49:19,221 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000034_0
+2017-02-18 09:49:19,226 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,227 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4b9eaa86
+2017-02-18 09:49:19,233 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,248 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000034_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,257 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 11 len: 57 to MEMORY
+2017-02-18 09:49:19,261 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,264 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->11
+2017-02-18 09:49:19,266 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,272 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,276 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 11, usedMemory ->13
+2017-02-18 09:49:19,278 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#35 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 11 len: 59 to MEMORY
+2017-02-18 09:49:19,284 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,287 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 3, commitMemory -> 13, usedMemory ->24
+2017-02-18 09:49:19,288 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,289 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:19,290 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:19,291 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 12 bytes
+2017-02-18 09:49:19,323 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 24 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:19,332 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 81 bytes from disk
+2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:19,334 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:19,336 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 15 bytes
+2017-02-18 09:49:19,344 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,377 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000034_0 is done. And is in the process of committing
+2017-02-18 09:49:19,378 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000034_0 is allowed to commit now
+2017-02-18 09:49:19,379 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000034_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000034
+2017-02-18 09:49:19,384 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 68%
+2017-02-18 09:49:19,386 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:19,388 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000034_0' done.
+2017-02-18 09:49:19,389 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000034_0
+2017-02-18 09:49:19,394 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000035_0
+2017-02-18 09:49:19,400 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:19,401 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:19,403 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@23bd25db
+2017-02-18 09:49:19,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:19,422 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000035_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:19,437 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 72 to MEMORY
+2017-02-18 09:49:19,449 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:19,456 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23
+2017-02-18 09:49:19,461 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,468 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:19,473 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25
+2017-02-18 09:49:19,482 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#36 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:19,487 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:19,490 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27
+2017-02-18 09:49:19,491 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:19,492 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:19,504 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes +2017-02-18 09:49:19,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:19,541 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 86 bytes from disk +2017-02-18 09:49:19,544 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:19,545 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:19,547 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes +2017-02-18 09:49:19,550 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,579 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000035_0 is done. And is in the process of committing +2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,584 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000035_0 is allowed to commit now +2017-02-18 09:49:19,593 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000035_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000035 +2017-02-18 09:49:19,598 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:19,600 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000035_0' done. 
+2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000035_0 +2017-02-18 09:49:19,601 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000036_0 +2017-02-18 09:49:19,613 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:19,614 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:19,616 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@f70f500 +2017-02-18 09:49:19,622 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:19,636 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000036_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:19,643 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 31 len: 83 to MEMORY +2017-02-18 09:49:19,653 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 31 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:19,658 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 31, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->31 +2017-02-18 09:49:19,661 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 19 len: 65 to MEMORY +2017-02-18 09:49:19,667 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:19,671 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 2, commitMemory -> 31, usedMemory ->50 +2017-02-18 09:49:19,678 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#37 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 19 len: 67 to MEMORY +2017-02-18 09:49:19,680 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 19 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:19,684 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 19, inMemoryMapOutputs.size() -> 3, commitMemory -> 50, usedMemory ->69 +2017-02-18 09:49:19,685 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:19,686 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:19,687 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:19,690 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 54 bytes +2017-02-18 09:49:19,725 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 69 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 116 bytes from disk +2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:19,726 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:19,727 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 60 bytes +2017-02-18 09:49:19,729 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,767 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000036_0 is done. And is in the process of committing +2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,768 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000036_0 is allowed to commit now +2017-02-18 09:49:19,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000036_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000036 +2017-02-18 09:49:19,773 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:19,775 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000036_0' done. 
+2017-02-18 09:49:19,776 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000036_0 +2017-02-18 09:49:19,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000037_0 +2017-02-18 09:49:19,783 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:19,785 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:19,786 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4624d4d1 +2017-02-18 09:49:19,792 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:19,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000037_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:19,825 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 68 to MEMORY +2017-02-18 09:49:19,830 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:19,837 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 09:49:19,840 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 11 len: 55 to MEMORY +2017-02-18 09:49:19,848 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 11 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:19,854 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 11, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->33 +2017-02-18 09:49:19,859 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#38 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:19,865 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:19,869 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 33, usedMemory ->35 +2017-02-18 09:49:19,870 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:19,871 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:19,873 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 23 bytes +2017-02-18 09:49:19,897 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 35 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:19,901 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 87 bytes from disk +2017-02-18 09:49:19,902 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:19,903 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:19,919 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 26 bytes +2017-02-18 09:49:19,920 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,958 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000037_0 is done. And is in the process of committing +2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:19,960 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000037_0 is allowed to commit now +2017-02-18 09:49:19,961 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000037_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000037 +2017-02-18 09:49:19,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:19,972 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000037_0' done. 
+2017-02-18 09:49:19,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000037_0 +2017-02-18 09:49:19,974 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000038_0 +2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:19,984 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:19,985 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@7d2b79ba +2017-02-18 09:49:19,993 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:20,012 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000038_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:20,020 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 22 len: 70 to MEMORY +2017-02-18 09:49:20,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:20,031 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->22 +2017-02-18 09:49:20,033 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,038 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:20,042 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 22, usedMemory ->24 +2017-02-18 09:49:20,051 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#39 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,058 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:20,062 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 24, usedMemory ->26 +2017-02-18 09:49:20,064 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:20,065 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:20,066 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:20,067 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes +2017-02-18 09:49:20,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 26 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:20,092 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 84 bytes from disk +2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:20,094 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:20,098 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 16 bytes +2017-02-18 09:49:20,099 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,145 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000038_0 is done. And is in the process of committing +2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,147 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000038_0 is allowed to commit now +2017-02-18 09:49:20,148 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000038_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000038 +2017-02-18 09:49:20,151 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000038_0' done. 
+2017-02-18 09:49:20,153 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000038_0 +2017-02-18 09:49:20,154 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000039_0 +2017-02-18 09:49:20,162 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:20,163 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:20,164 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@53944f2c +2017-02-18 09:49:20,171 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:20,192 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000039_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:20,199 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 59 to MEMORY +2017-02-18 09:49:20,207 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:20,213 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12 +2017-02-18 09:49:20,220 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,230 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:20,232 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14 +2017-02-18 09:49:20,244 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#40 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:20,252 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16 +2017-02-18 09:49:20,254 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:20,255 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:20,257 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:20,272 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:20,295 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 73 bytes from disk +2017-02-18 09:49:20,297 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:20,298 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:20,301 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes +2017-02-18 09:49:20,304 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,337 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000039_0 is done. And is in the process of committing +2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,338 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000039_0 is allowed to commit now +2017-02-18 09:49:20,340 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000039_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000039 +2017-02-18 09:49:20,345 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:20,346 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000039_0' done. 
+2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000039_0 +2017-02-18 09:49:20,347 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000040_0 +2017-02-18 09:49:20,358 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:20,362 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:20,369 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@4cafad79 +2017-02-18 09:49:20,378 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:20,392 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000040_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:20,394 INFO org.apache.hadoop.mapreduce.Job: map 100% reduce 100% +2017-02-18 09:49:20,400 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,406 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:20,409 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:20,412 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,423 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:20,427 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:20,429 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#41 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,445 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:20,448 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:20,450 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:20,451 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:20,464 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:20,491 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk +2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:20,518 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:20,519 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes +2017-02-18 09:49:20,520 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,570 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000040_0 is done. And is in the process of committing +2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,571 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000040_0 is allowed to commit now +2017-02-18 09:49:20,572 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000040_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000040 +2017-02-18 09:49:20,573 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:20,574 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000040_0' done. 
+2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000040_0 +2017-02-18 09:49:20,582 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000041_0 +2017-02-18 09:49:20,594 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:20,595 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@61da1de +2017-02-18 09:49:20,606 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:20,635 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000041_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:20,637 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 44 len: 89 to MEMORY +2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 44 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:20,645 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 44, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->44 +2017-02-18 09:49:20,647 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 22 len: 69 to MEMORY +2017-02-18 09:49:20,664 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:20,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 2, commitMemory -> 44, usedMemory ->66 +2017-02-18 09:49:20,666 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#42 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:20,687 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:20,698 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 66, usedMemory ->68 +2017-02-18 09:49:20,702 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:20,704 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:20,705 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 2 segments left of total size: 54 bytes +2017-02-18 09:49:20,719 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:20,734 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 111 bytes from disk +2017-02-18 09:49:20,735 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:20,736 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:20,737 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 57 bytes +2017-02-18 09:49:20,740 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,776 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000041_0 is done. And is in the process of committing +2017-02-18 09:49:20,777 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000041_0 is allowed to commit now +2017-02-18 09:49:20,778 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000041_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000041 +2017-02-18 09:49:20,779 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:20,781 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000041_0' done. 
+2017-02-18 09:49:20,787 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000041_0 +2017-02-18 09:49:20,788 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000042_0 +2017-02-18 09:49:20,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:20,796 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:20,798 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@207bfdc3 +2017-02-18 09:49:20,804 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:20,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000042_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:20,830 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 34 len: 81 to MEMORY +2017-02-18 09:49:20,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 34 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:20,846 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 34, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->34 +2017-02-18 09:49:20,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 12 len: 58 to MEMORY +2017-02-18 09:49:20,856 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:20,863 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 2, commitMemory -> 34, usedMemory ->46 +2017-02-18 09:49:20,866 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#43 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 22 len: 69 to MEMORY +2017-02-18 09:49:20,876 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 22 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:20,877 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 22, inMemoryMapOutputs.size() -> 3, commitMemory -> 46, usedMemory ->68 +2017-02-18 09:49:20,879 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:20,880 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:20,881 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 50 bytes +2017-02-18 09:49:20,893 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 68 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:20,906 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 113 bytes from disk +2017-02-18 09:49:20,907 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:20,908 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:20,929 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 58 bytes +2017-02-18 09:49:20,931 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,962 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000042_0 is done. And is in the process of committing +2017-02-18 09:49:20,963 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000042_0 is allowed to commit now +2017-02-18 09:49:20,964 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000042_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000042 +2017-02-18 09:49:20,965 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:20,968 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000042_0' done. 
+2017-02-18 09:49:20,972 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000042_0 +2017-02-18 09:49:20,973 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000043_0 +2017-02-18 09:49:20,980 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:20,981 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:20,984 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1681a6f6 +2017-02-18 09:49:20,990 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:21,003 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000043_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:21,018 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 23 len: 71 to MEMORY +2017-02-18 09:49:21,024 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 23 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:21,029 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 23, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->23 +2017-02-18 09:49:21,036 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:21,045 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:21,048 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 23, usedMemory ->25 +2017-02-18 09:49:21,050 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#44 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:21,055 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:21,061 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 25, usedMemory ->27 +2017-02-18 09:49:21,062 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:21,063 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:21,064 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:21,065 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes +2017-02-18 09:49:21,086 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 27 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:21,102 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 85 bytes from disk +2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:21,104 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:21,105 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 17 bytes +2017-02-18 09:49:21,108 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:21,147 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000043_0 is done. And is in the process of committing +2017-02-18 09:49:21,148 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000043_0 is allowed to commit now +2017-02-18 09:49:21,149 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000043_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000043 +2017-02-18 09:49:21,150 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000043_0' done. 
+2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000043_0 +2017-02-18 09:49:21,160 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000044_0 +2017-02-18 09:49:21,175 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:21,176 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:21,177 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@3306c9fa +2017-02-18 09:49:21,182 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:21,196 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000044_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:21,202 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 24 len: 73 to MEMORY +2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:21,234 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->24 +2017-02-18 09:49:21,248 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 24 len: 71 to MEMORY +2017-02-18 09:49:21,249 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 24 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:21,254 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 24, inMemoryMapOutputs.size() -> 2, commitMemory -> 24, usedMemory ->48 +2017-02-18 09:49:21,255 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#45 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 13 len: 60 to MEMORY +2017-02-18 09:49:21,257 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:21,268 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 3, commitMemory -> 48, usedMemory ->61 +2017-02-18 09:49:21,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:21,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs +2017-02-18 09:49:21,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments +2017-02-18 09:49:21,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 3 segments left of total size: 40 bytes +2017-02-18 09:49:21,279 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 61 bytes to disk to satisfy reduce memory limit +2017-02-18 09:49:21,307 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 105 bytes from disk +2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce +2017-02-18 09:49:21,316 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments +2017-02-18 09:49:21,317 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 50 bytes +2017-02-18 09:49:21,322 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:21,353 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000044_0 is done. And is in the process of committing +2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. +2017-02-18 09:49:21,356 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000044_0 is allowed to commit now +2017-02-18 09:49:21,365 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000044_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000044 +2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce +2017-02-18 09:49:21,376 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000044_0' done. 
+2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000044_0 +2017-02-18 09:49:21,377 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000045_0 +2017-02-18 09:49:21,385 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1 +2017-02-18 09:49:21,386 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ] +2017-02-18 09:49:21,388 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1d01f7c1 +2017-02-18 09:49:21,396 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10 +2017-02-18 09:49:21,421 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000045_0 Thread started: EventFetcher for fetching Map Completion Events +2017-02-18 09:49:21,435 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0 +2017-02-18 09:49:21,443 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2 +2017-02-18 09:49:21,451 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:21,454 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0 +2017-02-18 09:49:21,468 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4 +2017-02-18 09:49:21,471 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#46 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY +2017-02-18 09:49:21,477 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0 +2017-02-18 09:49:21,480 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6 +2017-02-18 09:49:21,482 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning +2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied. 
+2017-02-18 09:49:21,483 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,484 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:21,519 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,522 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:21,523 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,526 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,528 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:21,543 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,578 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000045_0 is done. And is in the process of committing
+2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,579 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000045_0 is allowed to commit now
+2017-02-18 09:49:21,580 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000045_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000045
+2017-02-18 09:49:21,581 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,583 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000045_0' done.
+2017-02-18 09:49:21,584 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000045_0
+2017-02-18 09:49:21,585 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000046_0
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,591 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@6a45aa68
+2017-02-18 09:49:21,604 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,617 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000046_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,630 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 13 len: 60 to MEMORY
+2017-02-18 09:49:21,635 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 13 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,639 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 13, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->13
+2017-02-18 09:49:21,656 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,661 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,665 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 13, usedMemory ->15
+2017-02-18 09:49:21,667 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#47 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,673 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 15, usedMemory ->17
+2017-02-18 09:49:21,694 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,695 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,696 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,702 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 17 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 74 bytes from disk
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,703 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,740 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,742 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,768 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000046_0 is done. And is in the process of committing
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000046_0 is allowed to commit now
+2017-02-18 09:49:21,770 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000046_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000046
+2017-02-18 09:49:21,774 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000046_0' done.
+2017-02-18 09:49:21,777 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000046_0
+2017-02-18 09:49:21,781 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000047_0
+2017-02-18 09:49:21,795 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,796 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,797 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@2c3c5ba6
+2017-02-18 09:49:21,801 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:21,819 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000047_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:21,822 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 14 len: 62 to MEMORY
+2017-02-18 09:49:21,838 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 14 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:21,841 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 14, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->14
+2017-02-18 09:49:21,848 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,852 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:21,855 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:21,857 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#48 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:21,862 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:21,864 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 16, usedMemory ->18
+2017-02-18 09:49:21,866 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,867 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:21,868 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,898 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 18 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:21,912 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 76 bytes from disk
+2017-02-18 09:49:21,913 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:21,914 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:21,915 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:21,918 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,946 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000047_0 is done. And is in the process of committing
+2017-02-18 09:49:21,958 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:21,967 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000047_0 is allowed to commit now
+2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000047_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000047
+2017-02-18 09:49:21,969 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:21,970 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000047_0' done.
+2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000047_0
+2017-02-18 09:49:21,976 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000048_0
+2017-02-18 09:49:21,985 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:21,986 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@1ce764b2
+2017-02-18 09:49:21,991 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:22,015 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000048_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:22,024 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,027 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:22,034 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->2
+2017-02-18 09:49:22,043 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,048 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:22,051 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 2, usedMemory ->4
+2017-02-18 09:49:22,067 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#49 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,068 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 4, usedMemory ->6
+2017-02-18 09:49:22,084 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,085 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:22,086 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 6 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:22,140 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 55 bytes from disk
+2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:22,141 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:22,142 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 0 segments left of total size: 0 bytes
+2017-02-18 09:49:22,155 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,184 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000048_0 is done. And is in the process of committing
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000048_0 is allowed to commit now
+2017-02-18 09:49:22,186 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000048_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000048
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000048_0' done.
+2017-02-18 09:49:22,187 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000048_0
+2017-02-18 09:49:22,188 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local1938545376_0001_r_000049_0
+2017-02-18 09:49:22,193 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 09:49:22,194 INFO org.apache.hadoop.mapred.ReduceTask: Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@518f2876
+2017-02-18 09:49:22,217 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: MergerManager: memoryLimit=679778688, maxSingleShuffleLimit=169944672, mergeThreshold=448653952, ioSortFactor=10, memToMemMergeOutputsThreshold=10
+2017-02-18 09:49:22,230 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: attempt_local1938545376_0001_r_000049_0 Thread started: EventFetcher for fetching Map Completion Events
+2017-02-18 09:49:22,235 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000000_0 decomp: 12 len: 57 to MEMORY
+2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 12 bytes from map-output for attempt_local1938545376_0001_m_000000_0
+2017-02-18 09:49:22,246 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 12, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->12
+2017-02-18 09:49:22,247 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000001_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000001_0
+2017-02-18 09:49:22,248 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 2, commitMemory -> 12, usedMemory ->14
+2017-02-18 09:49:22,249 INFO org.apache.hadoop.mapreduce.task.reduce.LocalFetcher: localfetcher#50 about to shuffle output of map attempt_local1938545376_0001_m_000002_0 decomp: 2 len: 41 to MEMORY
+2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.InMemoryMapOutput: Read 2 bytes from map-output for attempt_local1938545376_0001_m_000002_0
+2017-02-18 09:49:22,250 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: closeInMemoryFile -> map-output of size: 2, inMemoryMapOutputs.size() -> 3, commitMemory -> 14, usedMemory ->16
+2017-02-18 09:49:22,269 INFO org.apache.hadoop.mapreduce.task.reduce.EventFetcher: EventFetcher is interrupted.. Returning
+2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,270 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: finalMerge called with 3 in-memory map-outputs and 0 on-disk map-outputs
+2017-02-18 09:49:22,271 INFO org.apache.hadoop.mapred.Merger: Merging 3 sorted segments
+2017-02-18 09:49:22,272 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:22,299 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merged 3 segments, 16 bytes to disk to satisfy reduce memory limit
+2017-02-18 09:49:22,300 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 1 files, 71 bytes from disk
+2017-02-18 09:49:22,301 INFO org.apache.hadoop.mapreduce.task.reduce.MergeManagerImpl: Merging 0 segments, 0 bytes from memory into reduce
+2017-02-18 09:49:22,302 INFO org.apache.hadoop.mapred.Merger: Merging 1 sorted segments
+2017-02-18 09:49:22,303 INFO org.apache.hadoop.mapred.Merger: Down to the last merge-pass, with 1 segments left of total size: 6 bytes
+2017-02-18 09:49:22,314 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,356 INFO org.apache.hadoop.mapred.Task: Task:attempt_local1938545376_0001_r_000049_0 is done. And is in the process of committing
+2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.LocalJobRunner: 3 / 3 copied.
+2017-02-18 09:49:22,358 INFO org.apache.hadoop.mapred.Task: Task attempt_local1938545376_0001_r_000049_0 is allowed to commit now
+2017-02-18 09:49:22,359 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: Saved output of task 'attempt_local1938545376_0001_r_000049_0' to file:/home/cloudera/workspace/bpa/Assign1/output_Q1.iiii/_temporary/0/task_local1938545376_0001_r_000049
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce > reduce
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.Task: Task 'attempt_local1938545376_0001_r_000049_0' done.
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: Finishing task: attempt_local1938545376_0001_r_000049_0
+2017-02-18 09:49:22,360 INFO org.apache.hadoop.mapred.LocalJobRunner: reduce task executor complete.
+2017-02-18 09:49:22,639 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local1938545376_0001
+java.lang.NoClassDefFoundError: org/apache/http/client/methods/HttpUriRequest
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:573)
+Caused by: java.lang.ClassNotFoundException: org.apache.http.client.methods.HttpUriRequest
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
+	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
+	at java.security.AccessController.doPrivileged(Native Method)
+	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
+	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
+	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
+	... 1 more
+2017-02-18 09:49:23,414 INFO org.apache.hadoop.mapreduce.Job: Job job_local1938545376_0001 failed with state FAILED due to: NA
+2017-02-18 09:49:23,878 INFO org.apache.hadoop.mapreduce.Job: Counters: 30
+	File System Counters
+		FILE: Number of bytes read=1378254888
+		FILE: Number of bytes written=15479292
+		FILE: Number of read operations=0
+		FILE: Number of large read operations=0
+		FILE: Number of write operations=0
+	Map-Reduce Framework
+		Map input records=507535
+		Map output records=4678719
+		Map output bytes=43638689
+		Map output materialized bytes=8043
+		Input split bytes=351
+		Combine input records=4678719
+		Combine output records=131
+		Reduce input groups=77
+		Reduce shuffle bytes=8043
+		Reduce input records=131
+		Reduce output records=77
+		Spilled Records=262
+		Shuffled Maps =150
+		Failed Shuffles=0
+		Merged Map outputs=150
+		GC time elapsed (ms)=866
+		Total committed heap usage (bytes)=10455764992
+	Shuffle Errors
+		BAD_ID=0
+		CONNECTION=0
+		IO_ERROR=0
+		WRONG_LENGTH=0
+		WRONG_MAP=0
+		WRONG_REDUCE=0
+	File Input Format Counters
+		Bytes Read=26057874
+	File Output Format Counters
+		Bytes Written=1290
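Note that job_local1938545376_0001 finished all fifty reduce tasks (r_000000 through r_000049) and only then died in LocalJobRunner's job thread with a NoClassDefFoundError for org.apache.http.client.methods.HttpUriRequest, which typically means the Apache HttpClient jar was missing from the classpath of the JVM that ran the job; the final FAILED state says nothing about the MapReduce logic itself. Every submission in this log is also preceded by the "No job jar file set" warning. A hypothetical driver skeleton consistent with this run is sketched below; the class name, job name, and argument handling are assumptions, not taken from the repository, and the classpath problem itself can only be fixed outside the code (e.g. by putting httpclient-*.jar on HADOOP_CLASSPATH).

    // Hypothetical driver sketch; names are illustrative assumptions.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class Question1Driver {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration(), "Question1");
            job.setJarByClass(Question1Driver.class); // silences "No job jar file set"
            job.setNumReduceTasks(50);                // matches reducers r_000000..r_000049 above
            // setMapperClass/setCombinerClass/setReducerClass omitted in this sketch
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }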
+2017-02-18 11:07:03,663 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 11:07:06,204 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 11:07:06,206 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 11:07:08,501 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 11:07:08,612 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 11:07:09,061 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 11:07:10,524 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local119218342_0001
+2017-02-18 11:07:12,698 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 11:07:12,701 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local119218342_0001
+2017-02-18 11:07:12,730 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 11:07:12,798 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:07:12,816 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 11:07:13,323 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 11:07:13,325 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local119218342_0001_m_000000_0
+2017-02-18 11:07:13,592 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:07:13,730 INFO org.apache.hadoop.mapreduce.Job: Job job_local119218342_0001 running in uber mode : false
+2017-02-18 11:07:13,731 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 11:07:13,755 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:07:13,777 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 11:07:14,866 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:07:14,884 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:07:14,884 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:07:14,885 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:07:14,885 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:07:14,932 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:07:14,966 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:07:15,056 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:07:15,141 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local119218342_0001_m_000001_0
+2017-02-18 11:07:15,170 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:07:15,172 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:07:15,185 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 11:07:15,720 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:07:15,729 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:07:15,730 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:07:15,730 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:07:15,731 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:07:15,738 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:07:15,756 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:07:15,760 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:07:15,800 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local119218342_0001_m_000002_0
+2017-02-18 11:07:15,806 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:07:15,821 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:07:15,841 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 11:07:16,354 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:07:16,356 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:07:16,356 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:07:16,357 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:07:16,357 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:07:16,366 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:07:16,369 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:07:16,377 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:07:16,404 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 11:07:16,406 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local119218342_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:79)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 11:07:16,741 INFO org.apache.hadoop.mapreduce.Job: Job job_local119218342_0001 failed with state FAILED due to: NA
+2017-02-18 11:07:16,789 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
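This failure, repeated verbatim in the three runs that follow, is a type-declaration bug rather than a runtime fluke: the trace points into Question2.InvertedIndex$Map.map (InvertedIndex.java:79), where the mapper writes a Text value while the job configuration still promises IntWritable map outputs. When setMapOutputValueClass() is never called, Hadoop falls back to the reduce output value class, and MapOutputBuffer.collect() enforces the declared type at the first write. The sketch below shows the likely shape of the code and the fix; apart from the Question2.InvertedIndex and Map names visible in the stack trace, everything in it is an assumption about what the source plausibly looks like, not the repository's actual code.

    // Sketch only: class names from the stack trace are real, the rest is assumed.
    package Question2;

    import java.io.IOException;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.input.FileSplit;

    public class InvertedIndex {

        public static class Map extends Mapper<LongWritable, Text, Text, Text> {
            private final Text word = new Text();
            private final Text source = new Text();

            @Override
            protected void map(LongWritable offset, Text line, Context context)
                    throws IOException, InterruptedException {
                // The value written here is a Text (a file name); MapOutputBuffer
                // rejects it if the job still declares IntWritable map-output values.
                source.set(((FileSplit) context.getInputSplit()).getPath().getName());
                for (String token : line.toString().split("\\s+")) {
                    if (!token.isEmpty()) {
                        word.set(token);
                        context.write(word, source);
                    }
                }
            }
        }

        // In the driver, declare the map-output types explicitly; without
        // setMapOutputValueClass() Hadoop assumes the reduce output value class.
        static void declareTypes(Job job) {
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class); // the fix for the mismatch above
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
        }
    }

The part-r-00000 sample at the end of this diff, one word per line followed by the files it occurs in, is consistent with this Text/Text contract once a reducer joins the distinct file names per word.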
+2017-02-18 11:16:30,603 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 11:16:33,247 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 11:16:33,273 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 11:16:35,743 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 11:16:35,861 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 11:16:36,384 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 11:16:38,351 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local69712961_0001
+2017-02-18 11:16:40,763 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 11:16:40,765 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local69712961_0001
+2017-02-18 11:16:40,781 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 11:16:40,835 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:16:40,849 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 11:16:41,368 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 11:16:41,370 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local69712961_0001_m_000000_0
+2017-02-18 11:16:41,723 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:16:41,768 INFO org.apache.hadoop.mapreduce.Job: Job job_local69712961_0001 running in uber mode : false
+2017-02-18 11:16:41,770 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 11:16:41,882 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:16:41,924 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 11:16:42,977 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:16:43,024 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:16:43,026 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:16:43,026 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:16:43,026 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:16:43,079 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:16:43,107 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:16:43,175 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:16:43,281 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local69712961_0001_m_000001_0
+2017-02-18 11:16:43,307 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:16:43,308 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:16:43,326 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 11:16:43,890 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:16:43,893 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:16:43,894 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:16:43,894 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:16:43,895 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:16:43,907 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:16:43,924 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:16:43,925 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:16:43,960 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local69712961_0001_m_000002_0
+2017-02-18 11:16:43,973 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:16:43,974 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:16:43,981 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 11:16:44,555 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:16:44,572 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:16:44,574 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:16:44,576 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:16:44,578 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:16:44,588 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:16:44,594 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:16:44,601 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:16:44,625 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 11:16:44,628 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local69712961_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:79)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 11:16:44,797 INFO org.apache.hadoop.mapreduce.Job: Job job_local69712961_0001 failed with state FAILED due to: NA
+2017-02-18 11:16:44,897 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-02-18 11:17:50,128 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 11:17:52,989 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 11:17:53,009 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 11:17:55,504 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 11:17:55,650 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 11:17:56,165 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 11:17:57,932 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local940732002_0001
+2017-02-18 11:18:00,211 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 11:18:00,213 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local940732002_0001
+2017-02-18 11:18:00,224 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 11:18:00,297 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:18:00,313 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 11:18:00,869 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 11:18:00,871 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local940732002_0001_m_000000_0
+2017-02-18 11:18:01,173 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:18:01,238 INFO org.apache.hadoop.mapreduce.Job: Job job_local940732002_0001 running in uber mode : false
+2017-02-18 11:18:01,239 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 11:18:01,313 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:18:01,317 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 11:18:02,333 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:18:02,335 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:18:02,335 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:18:02,335 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:18:02,335 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:18:02,399 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:18:02,437 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:18:02,513 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:18:02,612 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local940732002_0001_m_000001_0
+2017-02-18 11:18:02,641 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:18:02,643 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:18:02,659 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 11:18:03,241 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:18:03,248 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:18:03,249 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:18:03,249 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:18:03,251 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:18:03,267 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:18:03,269 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:18:03,277 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:18:03,289 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local940732002_0001_m_000002_0
+2017-02-18 11:18:03,321 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:18:03,322 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:18:03,330 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 11:18:03,909 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:18:03,926 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:18:03,927 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:18:03,927 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:18:03,927 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:18:03,931 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:18:03,933 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:18:03,951 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:18:03,975 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 11:18:03,977 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local940732002_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:79)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 11:18:04,244 INFO org.apache.hadoop.mapreduce.Job: Job job_local940732002_0001 failed with state FAILED due to: NA
+2017-02-18 11:18:04,291 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
+2017-02-18 11:19:50,951 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 11:19:53,889 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 11:19:53,923 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 11:19:54,183 WARN org.apache.hadoop.security.UserGroupInformation: PriviledgedActionException as:cloudera (auth:SIMPLE) cause:org.apache.hadoop.mapred.FileAlreadyExistsException: Output directory file:/home/cloudera/workspace/bpa/Assign1/output_Q2 already exists
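The 11:19:54 attempt above died before submission for a different reason: FileOutputFormat refuses to write into an existing directory, and output_Q2 had been left behind by an earlier run. Deleting the stale directory in the driver before submitting avoids cleaning it by hand between runs. A minimal sketch follows, assuming it is called from the driver before job.waitForCompletion(); the path literal is taken from the exception message, the class and method names are illustrative.

    // Minimal sketch: remove a stale output directory before job submission.
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class OutputCleaner {
        static void clearStaleOutput(Configuration conf) throws IOException {
            Path out = new Path("file:/home/cloudera/workspace/bpa/Assign1/output_Q2");
            FileSystem fs = out.getFileSystem(conf);
            if (fs.exists(out)) {
                fs.delete(out, true); // recursive: removes part files and _SUCCESS
            }
        }
    }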
+2017-02-18 11:20:19,766 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+2017-02-18 11:20:22,542 INFO org.apache.hadoop.conf.Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
+2017-02-18 11:20:22,575 INFO org.apache.hadoop.metrics.jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
+2017-02-18 11:20:24,977 WARN org.apache.hadoop.mapreduce.JobResourceUploader: No job jar file set. User classes may not be found. See Job or Job#setJar(String).
+2017-02-18 11:20:25,097 INFO org.apache.hadoop.mapreduce.lib.input.FileInputFormat: Total input paths to process : 3
+2017-02-18 11:20:25,621 INFO org.apache.hadoop.mapreduce.JobSubmitter: number of splits:3
+2017-02-18 11:20:27,159 INFO org.apache.hadoop.mapreduce.JobSubmitter: Submitting tokens for job: job_local726282422_0001
+2017-02-18 11:20:29,479 INFO org.apache.hadoop.mapreduce.Job: The url to track the job: http://localhost:8080/
+2017-02-18 11:20:29,481 INFO org.apache.hadoop.mapreduce.Job: Running job: job_local726282422_0001
+2017-02-18 11:20:29,509 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter set in config null
+2017-02-18 11:20:29,589 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:20:29,599 INFO org.apache.hadoop.mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+2017-02-18 11:20:30,156 INFO org.apache.hadoop.mapred.LocalJobRunner: Waiting for map tasks
+2017-02-18 11:20:30,164 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local726282422_0001_m_000000_0
+2017-02-18 11:20:30,455 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:20:30,484 INFO org.apache.hadoop.mapreduce.Job: Job job_local726282422_0001 running in uber mode : false
+2017-02-18 11:20:30,485 INFO org.apache.hadoop.mapreduce.Job: map 0% reduce 0%
+2017-02-18 11:20:30,600 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:20:30,626 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg3200.txt:0+16013935
+2017-02-18 11:20:31,618 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:20:31,650 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:20:31,651 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:20:31,652 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:20:31,653 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:20:31,711 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:20:31,759 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:20:31,810 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:20:31,882 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local726282422_0001_m_000001_0
+2017-02-18 11:20:31,918 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:20:31,919 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:20:31,936 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg100.txt:0+5589889
+2017-02-18 11:20:32,515 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:20:32,515 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:20:32,515 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:20:32,515 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:20:32,518 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:20:32,528 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:20:32,530 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:20:32,542 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:20:32,563 INFO org.apache.hadoop.mapred.LocalJobRunner: Starting task: attempt_local726282422_0001_m_000002_0
+2017-02-18 11:20:32,595 INFO org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter: File Output Committer Algorithm version is 1
+2017-02-18 11:20:32,597 INFO org.apache.hadoop.mapred.Task: Using ResourceCalculatorProcessTree : [ ]
+2017-02-18 11:20:32,603 INFO org.apache.hadoop.mapred.MapTask: Processing split: file:/home/cloudera/workspace/bpa/Assign1/pg31100.txt:0+4454050
+2017-02-18 11:20:33,215 INFO org.apache.hadoop.mapred.MapTask: (EQUATOR) 0 kvi 26214396(104857584)
+2017-02-18 11:20:33,228 INFO org.apache.hadoop.mapred.MapTask: mapreduce.task.io.sort.mb: 100
+2017-02-18 11:20:33,228 INFO org.apache.hadoop.mapred.MapTask: soft limit at 83886080
+2017-02-18 11:20:33,228 INFO org.apache.hadoop.mapred.MapTask: bufstart = 0; bufvoid = 104857600
+2017-02-18 11:20:33,229 INFO org.apache.hadoop.mapred.MapTask: kvstart = 26214396; length = 6553600
+2017-02-18 11:20:33,239 INFO org.apache.hadoop.mapred.MapTask: Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
+2017-02-18 11:20:33,241 INFO org.apache.hadoop.mapreduce.lib.input.LineRecordReader: Found UTF-8 BOM and skipped it
+2017-02-18 11:20:33,248 INFO org.apache.hadoop.mapred.MapTask: Starting flush of map output
+2017-02-18 11:20:33,310 INFO org.apache.hadoop.mapred.LocalJobRunner: map task executor complete.
+2017-02-18 11:20:33,313 WARN org.apache.hadoop.mapred.LocalJobRunner: job_local726282422_0001
+java.lang.Exception: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
+Caused by: java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, received org.apache.hadoop.io.Text
+	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1078)
+	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
+	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
+	at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:79)
+	at Question2.InvertedIndex$Map.map(InvertedIndex.java:1)
+	at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
+	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
+	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
+	at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
+	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
+	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
+	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
+	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
+	at java.lang.Thread.run(Thread.java:745)
+2017-02-18 11:20:33,493 INFO org.apache.hadoop.mapreduce.Job: Job job_local726282422_0001 failed with state FAILED due to: NA
+2017-02-18 11:20:33,555 INFO org.apache.hadoop.mapreduce.Job: Counters: 0
diff --git a/Assign1/output_Q2/._SUCCESS.crc b/Assign1/output_Q2/._SUCCESS.crc
new file mode 100644
index 0000000000000000000000000000000000000000..3b7b044936a890cd8d651d349a752d819d71d22c
Binary files /dev/null and b/Assign1/output_Q2/._SUCCESS.crc differ
diff --git a/Assign1/output_Q2/.part-r-00000.crc b/Assign1/output_Q2/.part-r-00000.crc
new file mode 100644
index 0000000000000000000000000000000000000000..e1de64db00d01af82c1314830a33475bdede9765
Binary files /dev/null and b/Assign1/output_Q2/.part-r-00000.crc differ
diff --git a/Assign1/output_Q2/_SUCCESS b/Assign1/output_Q2/_SUCCESS
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Assign1/output_Q2/part-r-00000 b/Assign1/output_Q2/part-r-00000
new file mode 100644
index 0000000000000000000000000000000000000000..57a3024b59d62fab283cd4d71efc5a521cc5a056
--- /dev/null
+++ b/Assign1/output_Q2/part-r-00000
@@ -0,0 +1,87026 @@
+ pg31100.txt +! pg3200.txt +!" pg3200.txt +" pg3200.txt, pg100.txt +"'fnobjectionbilltakuzhlcoixrssoreferred!'" pg3200.txt +"'i pg31100.txt +"'journal'" pg3200.txt +"'kahkahponeeka'?" pg3200.txt +"'nothing.' pg3200.txt +"'of pg3200.txt +"'pears pg3200.txt +"'tis pg31100.txt +"'yes.' pg3200.txt +") pg3200.txt +"-------------------" pg3200.txt +"--like pg3200.txt +"--no, pg3200.txt +"108" pg3200.txt +"1601 pg3200.txt +"40," pg3200.txt +"46 pg3200.txt +"513." pg3200.txt +"52 pg3200.txt +"_fire!_" pg3200.txt +"_i_ pg31100.txt +"a pg31100.txt, pg3200.txt +"a-a-a-a-men!" pg3200.txt +"ab-so-lutely pg3200.txt +"above pg3200.txt +"absolutely."
pg3200.txt +"accepted pg3200.txt +"advise" pg31100.txt +"aeneas," pg3200.txt +"afflicted," pg3200.txt +"aforesaids" pg3200.txt +"after pg3200.txt +"age" pg3200.txt +"ages pg3200.txt +"agreed." pg3200.txt +"ah! pg31100.txt, pg3200.txt +"ah, pg3200.txt +"ain't pg3200.txt +"alabama pg3200.txt +"alack, pg3200.txt +"alas! pg31100.txt +"alas!" pg3200.txt +"alas!--alas!--a--alas! pg3200.txt +"alas!-ka." pg3200.txt +"all pg3200.txt +"allen"--a pg3200.txt +"almost pg31100.txt +"alone." pg3200.txt +"altogether pg31100.txt +"always!" pg3200.txt +"am pg3200.txt +"amen" pg100.txt +"amen"? pg100.txt +"amen," pg100.txt +"amen." pg3200.txt +"american pg3200.txt +"american" pg3200.txt +"american." pg3200.txt +"an pg31100.txt, pg3200.txt +"an" pg3200.txt +"ancient." pg3200.txt +"and pg31100.txt, pg3200.txt +"and." pg3200.txt +"andy's pg3200.txt +"anglesy." pg3200.txt +"another pg31100.txt, pg3200.txt +"answer" pg3200.txt +"apartments"--more pg3200.txt +"appeal pg3200.txt +"apple!" pg3200.txt +"approve" pg3200.txt +"arabian pg3200.txt +"are pg31100.txt, pg3200.txt +"argumenting pg3200.txt +"arkansas." pg3200.txt +"arkansaw." pg3200.txt +"around pg3200.txt +"as pg31100.txt, pg3200.txt +"as-is". pg100.txt +"ask pg31100.txt +"assessments" pg3200.txt +"at pg31100.txt, pg3200.txt +"attending pg31100.txt +"attributes?" pg3200.txt +"atwater" pg3200.txt +"babu"--i pg3200.txt +"balaam's pg3200.txt +"balloon-voyages." pg3200.txt +"bang!" pg3200.txt +"barlow" pg3200.txt +"barstow pg3200.txt +"be pg3200.txt +"bearer" pg3200.txt +"because pg31100.txt, pg3200.txt +"bedouins!" pg3200.txt +"before pg3200.txt +"began pg3200.txt +"beheld pg3200.txt +"behold, pg3200.txt +"being pg31100.txt +"bells?" pg3200.txt +"ben pg3200.txt +"better"? pg100.txt +"biltong." pg3200.txt +"bingley." pg31100.txt +"bit" pg3200.txt +"bitte?" pg3200.txt +"black pg3200.txt +"blindfolded" pg3200.txt +"blood pg3200.txt +"blood!" pg3200.txt +"bloodshed!" pg3200.txt +"blue" pg3200.txt +"boggs!" pg3200.txt +"bok's pg3200.txt +"book"--nephi: pg3200.txt +"books? pg3200.txt +"brag" pg3200.txt +"brake pg3200.txt +"bread pg31100.txt +"breakfast!" pg3200.txt +"breast pg3200.txt +"breathed" pg3200.txt +"brick--bosh." pg3200.txt +"bucksheesh! pg3200.txt +"budget" pg3200.txt +"bulk" pg3200.txt +"bull pg3200.txt +"bunch-grass." pg3200.txt +"burial pg3200.txt +"burnt pg3200.txt +"business" pg3200.txt +"business." pg3200.txt +"busted." pg3200.txt +"busy?" pg3200.txt +"but pg31100.txt, pg3200.txt +"but" pg3200.txt +"but, pg31100.txt, pg3200.txt +"but--" pg3200.txt +"but----" pg3200.txt +"butchered pg3200.txt +"buzzed" pg3200.txt +"by pg31100.txt, pg3200.txt +"bzzz-z-zzz" pg3200.txt +"cabinets" pg3200.txt +"caesar"? pg100.txt +"caesar." pg100.txt +"californian" pg3200.txt +"call," pg3200.txt +"came pg31100.txt +"camels!--camels!" pg3200.txt +"can pg31100.txt +"can!" pg3200.txt +"can't!" pg3200.txt +"candlestick" pg3200.txt +"cannot pg3200.txt +"cap'n pg3200.txt +"capital pg31100.txt +"captain pg31100.txt +"carl pg3200.txt +"carminge," pg3200.txt +"cast pg3200.txt +"cast-iron pg3200.txt +"celebrated pg3200.txt +"cement!" pg3200.txt +"cert'nly." pg3200.txt +"certainly--i--" pg3200.txt +"certainly." pg31100.txt, pg3200.txt +"chambers" pg3200.txt +"chapel-of-ease!" pg3200.txt +"charge!" pg3200.txt +"charming," pg31100.txt +"cheek."--[quotation pg3200.txt +"cheer pg3200.txt +"chemical pg3200.txt +"chiv" pg3200.txt +"chivalry," pg3200.txt +"choice," pg3200.txt +"christ pg3200.txt +"church." pg3200.txt +"cipher?" 
pg3200.txt +"city pg3200.txt +"civilized" pg3200.txt +"clarence!" pg3200.txt +"classic." pg3200.txt +"clean-ups" pg3200.txt +"code", pg3200.txt +"coffee," pg3200.txt +"cold pg3200.txt +"columbiana," pg3200.txt +"come pg3200.txt +"come, pg31100.txt +"come," pg3200.txt +"come--quick!" pg3200.txt +"come." pg3200.txt +"communing," pg3200.txt +"communings" pg3200.txt +"compare pg3200.txt +"compose pg31100.txt +"compositions" pg3200.txt +"conductor." pg3200.txt +"conductors" pg3200.txt +"confederate" pg3200.txt +"conrad," pg3200.txt +"considered pg3200.txt +"constantinopolitanischerdudelsackspfeifenmachersgesellschafft!" pg3200.txt +"contented pg3200.txt +"continue." pg3200.txt +"corps." pg3200.txt +"correspondence?" pg3200.txt +"corridor" pg3200.txt +"corrobboree" pg3200.txt +"course." pg3200.txt +"court pg3200.txt +"cow," pg3200.txt +"cow-catcher" pg3200.txt +"cowboy. pg3200.txt +"cowboys?" pg3200.txt +"cracked pg3200.txt +"crimes." pg3200.txt +"crystal." pg3200.txt +"cub" pg3200.txt +"curious pg3200.txt +"damn!" pg3200.txt +"damnation!" pg3200.txt +"dan pg3200.txt +"dan" pg3200.txt +"dark pg31100.txt, pg3200.txt +"dead." pg3200.txt +"dead?" pg3200.txt +"deal" pg3200.txt +"dear pg31100.txt +"dears" pg31100.txt +"death"; pg3200.txt +"decreasing," pg3200.txt +"deep" pg3200.txt +"defense pg3200.txt +"delivered pg3200.txt +"demonstrated pg3200.txt +"demonstrations pg3200.txt +"den pg3200.txt +"dere pg3200.txt +"despair!" pg3200.txt +"devilish." pg3200.txt +"did pg31100.txt, pg3200.txt +"didn't pg3200.txt +"die pg3200.txt +"dig pg3200.txt +"dilworthy." pg3200.txt +"disappointment?" pg31100.txt +"discrepancies." pg3200.txt +"divide" pg3200.txt +"divine." pg3200.txt +"divining-rod," pg3200.txt +"dixie," pg3200.txt +"do pg31100.txt, pg3200.txt +"do-stop-smoking" pg3200.txt +"does pg31100.txt, pg3200.txt +"don't pg31100.txt, pg3200.txt +"don't!" pg3200.txt +"done! pg3200.txt +"dot," pg3200.txt +"down pg3200.txt +"down." pg3200.txt +"dream," pg3200.txt +"dressed." pg3200.txt +"drove pg3200.txt +"dry pg3200.txt +"dying!" pg3200.txt +"earls pg3200.txt +"early pg3200.txt +"easy!" pg3200.txt +"eclipse."' pg3200.txt +"eh pg3200.txt +"eight!" pg3200.txt +"eight--thousand!" pg3200.txt +"eight-five." pg3200.txt +"elect" pg3200.txt +"ell" pg3200.txt +"eloquent pg3200.txt +"embedded," pg3200.txt +"emigrants," pg3200.txt +"empire." pg3200.txt +"emptyings" pg3200.txt +"en pg3200.txt +"endless pg3200.txt +"engineer-in-chief" pg3200.txt +"england pg3200.txt +"england." pg3200.txt +"enjoy pg3200.txt +"enterprise." pg3200.txt +"et pg3200.txt +"even pg3200.txt +"ever-blooming pg3200.txt +"every pg31100.txt, pg3200.txt +"everybody?" pg3200.txt +"everything." pg3200.txt +"evidence." pg3200.txt +"evidences pg3200.txt +"examine." pg3200.txt +"except pg3200.txt +"excommunication" pg3200.txt +"exeter pg3200.txt +"experiences"--are pg3200.txt +"explain," pg3200.txt +"explain." pg3200.txt +"expression," pg3200.txt +"expression." pg3200.txt +"fable pg3200.txt +"face pg3200.txt +"fact," pg3200.txt +"faithful pg3200.txt +"family?" pg3200.txt +"fanny pg31100.txt +"fanny, pg31100.txt +"fast?" pg3200.txt +"father!" pg3200.txt +"features" pg3200.txt +"feeling" pg3200.txt +"feet" pg3200.txt +"fellow-citizens"; pg3200.txt +"fighting pg3200.txt +"fire!" pg3200.txt +"fire--!" pg3200.txt +"fire-assay"--a pg3200.txt +"fire-assay." pg3200.txt +"fire-coals." pg3200.txt +"fire-escape" pg3200.txt +"firm," pg3200.txt +"first pg3200.txt +"first" pg3200.txt +"first-class pg3200.txt +"five-forty-five!" pg3200.txt +"flint pg3200.txt +"flint-picker?" 
pg3200.txt +"flush pg3200.txt +"fly!" pg3200.txt +"fo'castle." pg3200.txt +"for pg31100.txt, pg3200.txt +"for'rard" pg3200.txt +"forbid pg3200.txt +"force." pg3200.txt +"forever." pg3200.txt +"forgive pg3200.txt +"formulas" pg3200.txt +"forty-niner." pg3200.txt +"forward!" pg3200.txt +"found pg3200.txt +"found." pg3200.txt +"fountain" pg3200.txt +"fountains," pg3200.txt +"four!" pg3200.txt +"frankness pg3200.txt +"freak" pg3200.txt +"frederica pg31100.txt +"frederick." pg31100.txt +"free pg3200.txt +"freedomolovnaivanovich." pg3200.txt +"french." pg3200.txt +"freundschaft pg3200.txt +"friend--" pg3200.txt +"friends" pg3200.txt +"from pg31100.txt, pg3200.txt +"front--face!" pg3200.txt +"frozen pg3200.txt +"full pg3200.txt +"fund" pg3200.txt +"future" pg3200.txt +"futures." pg3200.txt +"g. pg3200.txt +"gallery" pg3200.txt +"gallic." pg3200.txt +"gallus"--[excuse pg3200.txt +"game--" pg3200.txt +"gates pg3200.txt +"gatto." pg3200.txt +"genius" pg3200.txt +"gentiles" pg3200.txt +"gentlemen, pg3200.txt +"gentlemen," pg3200.txt +"gentlemen; pg3200.txt +"get pg3200.txt +"give pg3200.txt +"go!" pg3200.txt +"go!--go pg3200.txt +"go" pg3200.txt +"god pg3200.txt +"god." pg3200.txt +"goggles?" pg3200.txt +"going pg31100.txt +"going, pg3200.txt +"going--going--" pg3200.txt +"going?" pg3200.txt +"gone,--and pg3200.txt +"goo-goo" pg3200.txt +"goo-goo--goo-goo-goo" pg3200.txt +"good pg31100.txt, pg3200.txt +"good-bye pg3200.txt +"gores," pg3200.txt +"grade." pg3200.txt +"grandfather?" pg3200.txt +"grapes!" pg3200.txt +"great pg3200.txt +"great-grandfather?" pg3200.txt +"great-great-grandfather?" pg3200.txt +"greatly pg3200.txt +"grieved--shocked. pg31100.txt +"grotto" pg3200.txt +"grub" pg3200.txt +"guides pg3200.txt +"guilty." pg3200.txt +"gully-keeper" pg3200.txt +"gungadin": pg3200.txt +"gwen," pg3200.txt +"had pg31100.txt, pg3200.txt +"hades" pg3200.txt +"hail pg3200.txt +"halt!" pg3200.txt +"handiness" pg3200.txt +"hands pg3200.txt +"happy pg31100.txt +"harem." pg3200.txt +"harem?" pg3200.txt +"hark!" pg3200.txt +"harmonies pg3200.txt +"harmonious." pg3200.txt +"has pg31100.txt +"hast pg3200.txt +"hatchment." pg3200.txt +"hattie." pg3200.txt +"have pg31100.txt, pg3200.txt +"havel." pg3200.txt +"haves" pg3200.txt +"hawkins, pg3200.txt +"he pg31100.txt, pg3200.txt +"he's pg3200.txt +"he--" pg3200.txt +"head pg3200.txt +"healthy pg3200.txt +"hear pg3200.txt +"heart pg3200.txt +"heaves"; pg3200.txt +"hello!" pg3200.txt +"hello!"--which pg3200.txt +"hello-central!" pg3200.txt +"hello-girl?" pg3200.txt +"henry pg3200.txt +"her," pg3200.txt +"here pg31100.txt, pg3200.txt +"here!" pg3200.txt +"here's pg3200.txt +"here." pg3200.txt +"here?" pg3200.txt +"herewith." pg3200.txt +"heyday! pg31100.txt +"hi! pg3200.txt +"hi-hi!" pg3200.txt +"hi-yi! pg3200.txt +"hi-yi!" pg3200.txt +"high pg3200.txt +"his pg31100.txt +"history" pg3200.txt +"history," pg3200.txt +"hold pg3200.txt +"holsatia." pg3200.txt +"homely" pg3200.txt +"honour,"--glanced pg31100.txt +"hooked" pg3200.txt +"horatio pg3200.txt +"horatio," pg3200.txt +"horse--buggy--cutter--lap-robe--patent-leathers--dog--plug-hat pg3200.txt +"how pg31100.txt, pg3200.txt +"how?" pg3200.txt +"however, pg31100.txt, pg3200.txt +"huck pg3200.txt +"huckleberry pg3200.txt +"humanity." pg3200.txt +"humboldt! pg3200.txt +"hurricane" pg3200.txt +"huzza!" pg3200.txt +"hyroqwhich?" pg3200.txt +"hyste." pg3200.txt +"i pg31100.txt, pg3200.txt +"i!" pg3200.txt +"i") pg3200.txt +"i"? 
pg3200.txt +"i'd pg3200.txt +"i've pg3200.txt +"i--" pg3200.txt +"i--have pg3200.txt +"i--love--you!" pg3200.txt +"idiot pg3200.txt +"idiot!" pg3200.txt +"if pg31100.txt, pg3200.txt +"ilium pg3200.txt +"im-poseeeble!" pg3200.txt +"imbecility!" pg3200.txt +"imperative pg3200.txt +"imposing-stone" pg3200.txt +"impromptu," pg3200.txt +"improvement" pg3200.txt +"in pg31100.txt, pg3200.txt +"increase!" pg31100.txt +"incredible." pg3200.txt +"indeed!" pg31100.txt, pg3200.txt, pg100.txt +"indeed, pg31100.txt +"independent pg3200.txt +"indications," pg3200.txt +"indo-anglian pg3200.txt +"infidel pg3200.txt +"influence" pg3200.txt +"informed pg31100.txt +"ingots," pg3200.txt +"injun." pg3200.txt +"innocents pg3200.txt +"innocents" pg3200.txt +"insensible," pg3200.txt +"instigating" pg3200.txt +"instructed pg3200.txt +"instruction pg3200.txt +"instructions" pg3200.txt +"instructions," pg3200.txt +"insulting." pg3200.txt +"interesting" pg3200.txt +"interview." pg3200.txt +"intimately." pg31100.txt +"is pg31100.txt, pg3200.txt +"is, pg31100.txt +"isms"? pg3200.txt +"it pg31100.txt, pg3200.txt +"it!" pg3200.txt +"it's pg3200.txt +"it's") pg3200.txt +"it." pg3200.txt +"item":-- pg3200.txt +"its." pg3200.txt +"jackson." pg3200.txt +"jake." pg3200.txt +"jes' pg3200.txt +"jim!" pg3200.txt +"jimpson" pg3200.txt +"joan." pg3200.txt +"jock" pg3200.txt +"joe" pg3200.txt +"john, pg31100.txt +"journal" pg3200.txt +"joy!" pg3200.txt +"judge." pg3200.txt +"julia pg31100.txt +"jumbo." pg3200.txt +"just pg3200.txt +"keep pg3200.txt +"kick!" pg3200.txt +"killed pg3200.txt +"kingdoms" pg3200.txt +"km." pg3200.txt +"knight pg3200.txt +"l'arbre pg3200.txt +"labbord!--stabbord!--s-t-e-a-d-y!--so! pg3200.txt +"ladies," pg3200.txt +"lady pg31100.txt +"lafts pg3200.txt +"lake pg3200.txt +"land pg3200.txt +"land" pg3200.txt +"language" pg3200.txt +"lark," pg3200.txt +"lash pg3200.txt +"latin." pg3200.txt +"laura, pg3200.txt +"lay pg3200.txt +"lay" pg3200.txt +"layout" pg3200.txt +"lead pg3200.txt +"leaning pg3200.txt +"leave pg3200.txt +"les pg3200.txt +"less pg31100.txt +"let pg31100.txt, pg3200.txt +"letters pg3200.txt +"liberty," pg3200.txt +"library": pg3200.txt +"library.") pg3200.txt +"lie pg3200.txt +"life pg3200.txt +"life-size" pg3200.txt +"light pg3200.txt +"like pg3200.txt +"like" pg3200.txt +"listen!" pg3200.txt +"literature," pg3200.txt +"literature." pg3200.txt +"live" pg3200.txt +"lizzy pg31100.txt +"lo, pg3200.txt +"localized" pg3200.txt +"log-rolling." pg3200.txt +"lohengrin." pg3200.txt +"long pg3200.txt +"look!" pg3200.txt +"look--there pg3200.txt +"lord pg31100.txt +"lord, pg31100.txt +"lordy!" pg3200.txt +"loud" pg3200.txt +"louis, pg3200.txt +"louisa." pg31100.txt +"m'sieu pg3200.txt +"m-y pg3200.txt +"m-y-word!" pg3200.txt +"mais, pg3200.txt +"mal-ease." pg3200.txt +"mamma," pg31100.txt +"man pg3200.txt +"mantel-piece pg3200.txt +"map pg3200.txt +"mark pg3200.txt +"married!" pg3200.txt +"marshall"--the pg3200.txt +"mary pg3200.txt +"master, pg3200.txt +"master?" pg3200.txt +"may pg31100.txt +"may, pg3200.txt +"mayn't pg3200.txt +"maze pg3200.txt +"me!" pg3200.txt +"me." pg3200.txt +"me."--m.t.] pg3200.txt +"meat pg3200.txt +"mekkamuselmannenmassenmenchenmoerdermohrenmuttermarmormonumentenmacher!" pg3200.txt +"memorable" pg3200.txt +"memoranda" pg3200.txt +"memoranda". pg3200.txt +"memories pg3200.txt +"men pg31100.txt +"mentioned" pg3200.txt +"meretricious," pg3200.txt +"methinks pg3200.txt +"michael pg3200.txt +"might pg31100.txt +"milksop!" pg3200.txt +"millet pg3200.txt +"mine." 
pg3200.txt +"miracle"--a pg3200.txt +"miss pg31100.txt +"mist-er.....er pg3200.txt +"mistered" pg3200.txt +"moblirte pg3200.txt +"modification" pg3200.txt +"mohammed," pg3200.txt +"monarch pg3200.txt +"monotonous pg3200.txt +"more pg3200.txt +"most pg31100.txt, pg3200.txt +"mother pg3200.txt +"mother, pg3200.txt +"mr pg31100.txt +"mr. pg31100.txt, pg3200.txt +"mrs. pg31100.txt +"mumblepeg" pg3200.txt +"mumps." pg3200.txt +"murder," pg3200.txt +"murther!" pg100.txt +"my pg31100.txt, pg3200.txt +"my"? pg3200.txt +"mystery" pg3200.txt +"n--no." pg3200.txt +"n-no." pg3200.txt +"name?" pg3200.txt +"nasty." pg3200.txt +"natchez." pg3200.txt +"nation" pg3200.txt +"ne-ver?" pg3200.txt +"never pg31100.txt +"never-never pg3200.txt +"never." pg100.txt +"next!" pg3200.txt +"next." pg3200.txt +"nigger" pg3200.txt +"nihilistendynamittheaterkaestchenssprengungsattentaetsversuchungen!" pg3200.txt +"no pg31100.txt, pg3200.txt +"no!!!" pg3200.txt +"no!" pg3200.txt +"no'm." pg3200.txt +"no, pg31100.txt, pg3200.txt +"no--dead." pg3200.txt +"no--nothing." pg3200.txt +"no." pg31100.txt, pg3200.txt +"no; pg3200.txt +"no?" pg3200.txt +"noble pg3200.txt +"nobody pg3200.txt +"nobody." pg3200.txt +"none." pg3200.txt +"nonsense, pg31100.txt +"not pg3200.txt +"notes," pg3200.txt +"noth'n!" pg3200.txt +"noth'n." pg3200.txt +"nothing pg31100.txt, pg3200.txt +"nothing." pg3200.txt +"notice" pg3200.txt +"notice." pg3200.txt +"notish." pg3200.txt +"now pg3200.txt +"now!" pg3200.txt +"now, pg31100.txt, pg3200.txt +"now." pg3200.txt +"nub" pg3200.txt +"o pg3200.txt +"o." pg3200.txt +"obsequies." pg3200.txt +"of pg31100.txt, pg3200.txt +"oftentimes pg31100.txt +"oh pg3200.txt +"oh! pg31100.txt +"oh!" pg31100.txt +"oh, pg31100.txt, pg3200.txt +"oh," pg3200.txt +"oh--exactly." pg3200.txt +"oils." pg3200.txt +"old pg31100.txt, pg3200.txt +"old" pg3200.txt +"on pg31100.txt, pg3200.txt +"one pg31100.txt +"one--!" pg3200.txt +"one--two--three--fire!" pg3200.txt +"open pg3200.txt +"opera," pg3200.txt +"or pg31100.txt, pg3200.txt +"orange!" pg3200.txt +"ordained pg3200.txt +"order pg3200.txt +"order! pg3200.txt +"order--order pg3200.txt +"othello." pg3200.txt +"ouch!" pg3200.txt +"our pg3200.txt +"out pg31100.txt +"outside" pg3200.txt +"outside," pg3200.txt +"over pg3200.txt +"over!" pg3200.txt +"over" pg3200.txt +"pacific pg3200.txt +"pads?" pg3200.txt +"palace." pg3200.txt +"papa." pg3200.txt +"parsifal" pg3200.txt +"parsifal," pg3200.txt +"passionately." pg3200.txt +"payments pg3200.txt +"peculiarly pg3200.txt +"people pg31100.txt +"perfectly." pg3200.txt +"perhaps pg31100.txt, pg3200.txt +"perhaps" pg3200.txt +"permit." pg3200.txt +"perpetuity." pg3200.txt +"phenomena," pg3200.txt +"philip, pg3200.txt +"philippa." pg31100.txt +"phillips." pg3200.txt +"phwere's pg3200.txt +"pillar pg3200.txt +"pinafore's" pg3200.txt +"place pg3200.txt +"placer"; pg3200.txt +"plains pg3200.txt +"please, pg3200.txt +"plough" pg3200.txt +"plow, pg3200.txt +"po' pg3200.txt +"pocket pg3200.txt +"poem." pg3200.txt +"poetry" pg3200.txt +"points" pg3200.txt +"political pg3200.txt +"political-- pg3200.txt +"pollo pg3200.txt +"pollution." pg3200.txt +"pone" pg3200.txt +"pone," pg3200.txt +"poor"-- pg3200.txt +"poppa" pg3200.txt +"possibly." pg3200.txt +"pray pg3200.txt +"preliminaries" pg3200.txt +"presbyterian" pg3200.txt +"presume" pg3200.txt +"pretending" pg3200.txt +"pretty." pg3200.txt +"previously pg3200.txt +"primaries,") pg3200.txt +"private" pg3200.txt +"pro- pg100.txt +"proceed pg3200.txt +"proceed." 
pg3200.txt +"proclamation pg3200.txt +"prodigal." pg3200.txt +"professors" pg3200.txt +"project pg31100.txt, pg3200.txt, pg100.txt +"prophecies" pg3200.txt +"prospecting." pg3200.txt +"providence's pg3200.txt +"puffs," pg3200.txt +"put pg3200.txt +"q." pg3200.txt +"quaker pg3200.txt +"quarter-her----" pg3200.txt +"queen's pg3200.txt +"rage!" pg3200.txt +"railroad pg3200.txt +"raised pg3200.txt +"rambler," pg3200.txt +"ranch." pg3200.txt +"rather pg31100.txt +"reader," pg3200.txt +"ready pg31100.txt +"ready! pg3200.txt +"ready." pg3200.txt +"ready?" pg3200.txt +"records." pg3200.txt +"recruiting," pg3200.txt +"recruits" pg3200.txt +"reform!" pg3200.txt +"regular" pg3200.txt +"regulars" pg3200.txt +"remain." pg3200.txt +"reminiscences pg3200.txt +"reminiscences" pg3200.txt +"removed pg3200.txt +"reply" pg3200.txt +"reserved!--how, pg31100.txt +"resign?" pg3200.txt +"responding pg3200.txt +"resting," pg3200.txt +"return pg3200.txt +"revelator." pg3200.txt +"revenge!" pg3200.txt +"right pg31100.txt, pg3200.txt, pg100.txt +"right." pg3200.txt +"righteous pg3200.txt +"righteousness"--is pg3200.txt +"rinalds" pg3200.txt +"ring-taw" pg3200.txt +"rise!" pg3200.txt +"river pg3200.txt +"river" pg3200.txt +"rivers" pg3200.txt +"rooms," pg3200.txt +"rosannah!" pg3200.txt +"rossmore!" pg3200.txt +"rot" pg3200.txt +"rough," pg3200.txt +"rumors," pg3200.txt +"rush." pg3200.txt +"rushing pg3200.txt +"russell's pg3200.txt +"rusticated?" pg3200.txt +"ruth." pg3200.txt +"ruth?" pg3200.txt +"sad, pg31100.txt +"sage-brush," pg3200.txt +"sage-tea" pg3200.txt +"salted"--and pg3200.txt +"salting" pg3200.txt +"sammy" pg3200.txt +"samples?" pg3200.txt +"sandwich pg3200.txt +"sapling" pg3200.txt +"satan!" pg3200.txt +"saturday, pg3200.txt +"say pg31100.txt +"say, pg3200.txt +"say--aleck?" pg3200.txt +"scandalously pg3200.txt +"schreiber." pg3200.txt +"schweitzerhof." pg3200.txt +"science" pg3200.txt +"scoop" pg3200.txt +"scribe." pg3200.txt +"scylla" pg3200.txt +"season" pg3200.txt +"second pg3200.txt +"see pg3200.txt +"see, pg3200.txt +"select." pg3200.txt +"selfish?" pg31100.txt +"sell pg3200.txt +"selling pg3200.txt +"sensation!" pg3200.txt +"sensible, pg31100.txt +"sent pg3200.txt +"separate pg3200.txt +"set pg3200.txt +"settle." pg3200.txt +"seven-ana-half!" pg3200.txt +"seventy-two." pg3200.txt +"sh! pg3200.txt +"sh!" pg3200.txt +"sh--" pg3200.txt +"shall pg31100.txt, pg3200.txt +"shall!" pg3200.txt +"she pg31100.txt, pg3200.txt +"she's pg3200.txt +"shear" pg3200.txt +"ship" pg3200.txt +"shoved". pg3200.txt +"shoved." pg3200.txt +"showing pg3200.txt +"shrank pg3200.txt +"shrill pg3200.txt +"siberia!" pg3200.txt +"sick pg3200.txt +"sight"; pg3200.txt +"sights" pg3200.txt +"sign" pg3200.txt +"signature, pg3200.txt +"signing pg3200.txt +"signs." pg3200.txt +"silence!" pg3200.txt +"sincerely pg3200.txt +"singing pg3200.txt +"sink" pg3200.txt +"sir pg31100.txt +"sir!" pg3200.txt +"sir" pg3200.txt +"sir, pg3200.txt +"sir?" pg3200.txt +"sister pg3200.txt +"situation," pg3200.txt +"situation." pg3200.txt +"six--feet--scant!" pg3200.txt +"size" pg3200.txt +"skull-hunters" pg3200.txt +"slender." pg3200.txt +"slouch" pg3200.txt +"slumgullion." pg3200.txt +"smells": pg3200.txt +"smelt" pg3200.txt +"snodgrass"-- pg3200.txt +"snodgrass"--saturday pg3200.txt +"so pg31100.txt, pg3200.txt +"so-so." pg3200.txt +"sociables" pg3200.txt +"sold." pg3200.txt +"some pg3200.txt +"some--yes." pg3200.txt +"something pg31100.txt +"soon pg3200.txt +"spare pg3200.txt +"speak pg3200.txt +"speak." 
pg3200.txt +"specimen pg3200.txt +"specimens," pg3200.txt +"spectacular pg3200.txt +"splendid!" pg3200.txt +"spoil" pg3200.txt +"spring" pg3200.txt +"stand-up." pg3200.txt +"stanning!" pg3200.txt +"starter." pg3200.txt +"steeds," pg3200.txt +"stick pg3200.txt +"stingy pg3200.txt +"stoning.") pg3200.txt +"stooped"; pg3200.txt +"stormfield" pg3200.txt +"stormfield." pg3200.txt +"storms," pg3200.txt +"stovepipe" pg3200.txt +"strange." pg31100.txt +"street pg3200.txt +"stretches"--stretches pg3200.txt +"strike pg3200.txt +"stub-hunters" pg3200.txt +"stuck!" pg3200.txt +"studying pg3200.txt +"stuff!" pg3200.txt +"stupid, pg31100.txt +"style," pg3200.txt +"subdued"; pg3200.txt +"subscriber." pg3200.txt +"such pg3200.txt +"suicide pg3200.txt +"sumter" pg3200.txt +"sundowner" pg3200.txt +"sundries," pg3200.txt +"sunflower"--' pg3200.txt +"superior pg31100.txt +"supplement" pg3200.txt +"supplements." pg3200.txt +"suppose pg31100.txt +"suppositions," pg3200.txt +"sure pg31100.txt +"sure?" pg3200.txt +"surely pg31100.txt, pg3200.txt +"surveys pg3200.txt +"suspicious"; pg3200.txt +"swedish pg3200.txt +"sweetheart?" pg3200.txt +"tah-rah-rah pg3200.txt +"take pg31100.txt, pg3200.txt +"taken pg3200.txt +"takings" pg3200.txt +"taps"--translatable, pg3200.txt +"tare pg3200.txt +"technique-- pg3200.txt +"tell pg3200.txt +"terrible pg3200.txt +"tf" pg3200.txt +"thank pg31100.txt +"thanks," pg3200.txt +"thanks." pg3200.txt +"thanks?" pg3200.txt +"that pg31100.txt, pg3200.txt +"that's pg31100.txt, pg3200.txt +"that, pg3200.txt +"that? pg3200.txt +"the pg31100.txt, pg3200.txt +"the" pg3200.txt +"thee" pg3200.txt +"theft?" pg3200.txt +"theirs--" pg3200.txt +"then pg31100.txt, pg3200.txt +"then" pg100.txt +"there pg31100.txt, pg3200.txt +"there's pg3200.txt +"there, pg31100.txt +"there--there! pg3200.txt +"therefore pg3200.txt +"therefore--" pg3200.txt +"these pg31100.txt, pg3200.txt +"they pg31100.txt, pg3200.txt +"thieves,"--whereupon pg3200.txt +"think." pg3200.txt +"thirteen." pg3200.txt +"thirty pg3200.txt +"this pg31100.txt, pg3200.txt +"thomas." pg3200.txt +"thoroughbrace" pg3200.txt +"thou pg3200.txt +"though pg31100.txt +"thousand" pg3200.txt +"three pg3200.txt +"three!" pg3200.txt +"three--"! pg3200.txt +"thrill pg3200.txt +"thrusting" pg100.txt +"ticknor pg3200.txt +"to pg31100.txt, pg3200.txt +"to" pg3200.txt +"tom pg3200.txt +"tom!" pg3200.txt +"tom, pg3200.txt +"tom." pg3200.txt +"tomahawked." pg3200.txt +"too pg3200.txt +"took pg3200.txt +"tracker," pg3200.txt +"transients." pg3200.txt +"transvaaltruppentropentransporttrampelthiertreibertrauungsthraenen- pg3200.txt +"trash pg3200.txt +"travel" pg3200.txt +"tribute." pg3200.txt +"trifling." pg3200.txt +"trouble pg3200.txt +"truly pg3200.txt +"trust."] pg3200.txt +"tube," pg3200.txt +"tuesday." pg3200.txt +"tuition pg3200.txt +"turner"] pg3200.txt +"turner." pg3200.txt +"twas pg100.txt +"twenty-five." pg3200.txt +"two--!" pg3200.txt +"two-year-olds" pg3200.txt +"unanimous?" pg3200.txt +"unbelief, pg3200.txt +"uncle pg3200.txt +"uncle" pg3200.txt +"under pg3200.txt +"unique"; pg3200.txt +"universe," pg3200.txt +"unperfected pg3200.txt +"unquestionablys," pg3200.txt +"unsophisticated," pg3200.txt +"untruth." pg3200.txt +"unwearied pg31100.txt +"unworthy pg31100.txt +"vacation" pg3200.txt +"vagabond"--the pg3200.txt +"venice!" pg3200.txt +"verily pg3200.txt +"very pg31100.txt, pg3200.txt +"very." pg31100.txt +"view." pg3200.txt +"virtue pg3200.txt +"wair pg3200.txt +"wait! pg3200.txt +"wake" pg3200.txt +"wanted pg3200.txt +"wants pg3200.txt +"wardrobe?" 
pg3200.txt +"warranty pg3200.txt +"was pg31100.txt, pg3200.txt +"wawkawampanoosucwinnebayowallazvsagamoresa- pg3200.txt +"way pg3200.txt +"we pg31100.txt, pg3200.txt +"we'll pg3200.txt +"we've pg3200.txt +"weaver." pg3200.txt +"webster." pg3200.txt +"wegen pg3200.txt +"welcome pg3200.txt +"well, pg31100.txt, pg3200.txt +"well," pg31100.txt +"well--could pg3200.txt +"well--no." pg3200.txt +"well--yes." pg3200.txt +"well-good-bye." pg3200.txt +"well?" pg31100.txt, pg3200.txt +"went pg3200.txt +"what pg31100.txt, pg3200.txt +"what!" pg3200.txt +"what's pg3200.txt +"what--regular?" pg3200.txt +"what--you?" pg3200.txt +"what?" pg3200.txt +"whatever pg31100.txt +"when pg31100.txt, pg3200.txt +"when?" pg3200.txt +"where pg31100.txt, pg3200.txt +"where, pg3200.txt +"where?" pg3200.txt +"wherefore"? pg3200.txt +"wherever pg31100.txt +"whether pg31100.txt +"whew!" pg3200.txt +"which pg31100.txt, pg3200.txt +"which, pg31100.txt +"which?" pg3200.txt +"while pg31100.txt +"whistle pg3200.txt +"who pg3200.txt +"who" pg3200.txt +"who?" pg3200.txt +"whoever pg31100.txt +"whore." pg100.txt +"whose pg3200.txt +"why pg31100.txt, pg3200.txt +"why, pg31100.txt, pg3200.txt +"why--no." pg3200.txt +"why?" pg3200.txt +"wickham pg31100.txt +"will pg31100.txt, pg3200.txt +"william pg31100.txt, pg3200.txt +"willow"; pg100.txt +"winnemucca," pg3200.txt +"with pg31100.txt, pg3200.txt +"without pg3200.txt +"woman" pg3200.txt +"woodston pg31100.txt +"words pg3200.txt +"worse pg31100.txt +"would pg31100.txt +"write?" pg3200.txt +"yankee" pg3200.txt +"ye pg3200.txt +"ye--yes." pg3200.txt +"ye-s." pg3200.txt +"yes!" pg3200.txt +"yes" pg3200.txt +"yes'm--she--" pg3200.txt +"yes'm." pg3200.txt +"yes, pg31100.txt, pg3200.txt +"yes," pg31100.txt +"yes--indeedy." pg3200.txt +"yes--middlin'." pg3200.txt +"yes--perfect." pg3200.txt +"yes--perfectly." pg3200.txt +"yes--regular." pg3200.txt +"yes--sometimes." pg3200.txt +"yes. pg31100.txt +"yes." pg31100.txt, pg3200.txt +"yet pg3200.txt +"you pg31100.txt, pg3200.txt +"you're pg3200.txt +"you've pg3200.txt +"you?" pg3200.txt +"young pg3200.txt +"your pg31100.txt, pg3200.txt +"zylobalsamum." pg3200.txt +#10000, pg100.txt +#100] pg100.txt +#31100] pg31100.txt +#3200] pg3200.txt +$1,000 pg3200.txt +$1,000, pg3200.txt +$1,000. pg3200.txt +$1,200 pg3200.txt +$1,265,215,217. pg3200.txt +$1,500 pg3200.txt +$1,500,000. pg3200.txt +$1.50. pg3200.txt +$10, pg3200.txt +$10,000 pg3200.txt +$10,000--along pg3200.txt +$10,000." pg3200.txt +$10,000; pg3200.txt +$100 pg3200.txt +$100,000 pg3200.txt +$100,000, pg3200.txt +$100,000. pg3200.txt +$1000; pg3200.txt +$105,000, pg3200.txt +$110, pg3200.txt +$118,000 pg3200.txt +$12 pg3200.txt +$12,000 pg3200.txt +$12,000,000. pg3200.txt +$12,500. pg3200.txt +$120,000. pg3200.txt +$130,000--is pg3200.txt +$15,000, pg3200.txt +$15,000. pg3200.txt +$150,000, pg3200.txt +$150.00 pg3200.txt +$16 pg3200.txt +$16,000 pg3200.txt +$162,000 pg3200.txt +$17,000 pg3200.txt +$17,000. pg3200.txt +$17,870.94. pg3200.txt +$170,000 pg3200.txt +$18,000. pg3200.txt +$19 pg3200.txt +$191,000 pg3200.txt +$2,000 pg3200.txt +$2,600 pg3200.txt +$2,800 pg3200.txt +$2,986 pg3200.txt +$20,000,000.] pg3200.txt +$200 pg3200.txt +$200,000 pg3200.txt +$200. pg3200.txt +$21,200,000 pg3200.txt +$21.70 pg3200.txt +$22.50. pg3200.txt +$2400 pg3200.txt +$25 pg3200.txt +$25,000. pg3200.txt +$250,000--and pg3200.txt +$27,875.83! pg3200.txt +$28,000,000. pg3200.txt +$2;400 pg3200.txt +$3 pg3200.txt +$3,000 pg3200.txt +$3,000, pg3200.txt +$3,000. pg3200.txt +$3. 
pg3200.txt +$30,000; pg3200.txt +$300 pg3200.txt +$300,000 pg3200.txt +$300,000,000. pg3200.txt +$300. pg3200.txt +$325,000 pg3200.txt +$350,000 pg3200.txt +$36 pg3200.txt +$37.50 pg3200.txt +$375 pg3200.txt +$38,500, pg3200.txt +$4,000 pg3200.txt +$4,000,000. pg3200.txt +$40,000 pg3200.txt +$400 pg3200.txt +$400,000 pg3200.txt +$45,000 pg3200.txt +$45,553,700. pg3200.txt +$5,000 pg3200.txt +$5,000." pg3200.txt +$50 pg3200.txt +$50, pg3200.txt +$50-- pg3200.txt +$500 pg3200.txt +$500, pg3200.txt +$500,000,000. pg3200.txt +$500,000; pg3200.txt +$500. pg3200.txt +$52,000,000. pg3200.txt +$600 pg3200.txt +$600. pg3200.txt +$7,960?" pg3200.txt +$73,000,000 pg3200.txt +$75 pg3200.txt +$75,000. pg3200.txt +$750.00 pg3200.txt +$8,000 pg3200.txt +$8,500 pg3200.txt +$8,630 pg3200.txt +$8,873--the pg3200.txt +$800,000; pg3200.txt +$8997.94; pg3200.txt +$9 pg3200.txt +$9,610 pg3200.txt +$900 pg3200.txt +& pg3200.txt +&c, pg31100.txt, pg3200.txt +&c. pg31100.txt, pg3200.txt, pg100.txt +&c.") pg3200.txt +&c.' pg100.txt +&c., pg31100.txt, pg3200.txt +&c.," pg31100.txt +&c; pg31100.txt +' pg3200.txt +'"no!" pg3200.txt +'"speak!" pg3200.txt +'"when?" pg3200.txt +'"yes." pg3200.txt +'----------!' pg3200.txt +'00. pg3200.txt +'01. pg3200.txt +'02. pg3200.txt +'03. pg3200.txt +'04 pg3200.txt +'04. pg3200.txt +'05. pg3200.txt +'06. pg3200.txt +'07 pg3200.txt +'07. pg3200.txt +'08. pg3200.txt +'09. pg3200.txt +'10. pg3200.txt +'26. pg3200.txt +'44 pg3200.txt +'50, pg3200.txt +'53. pg3200.txt +'56. pg3200.txt +'63. pg3200.txt +'63; pg3200.txt +'64. pg3200.txt +'66. pg3200.txt +'67-8, pg3200.txt +'67. pg3200.txt +'68. pg3200.txt +'70. pg3200.txt +'71. pg3200.txt +'72, pg3200.txt +'73. pg3200.txt +'74. pg3200.txt +'75. pg3200.txt +'76. pg3200.txt +'77. pg3200.txt +'78. pg3200.txt +'79. pg3200.txt +'80. pg3200.txt +'81. pg3200.txt +'82. pg3200.txt +'83. pg3200.txt +'84-'85. pg3200.txt +'84. pg3200.txt +'85. pg3200.txt +'86. pg3200.txt +'87 pg3200.txt +'87. pg3200.txt +'88. pg3200.txt +'89. pg3200.txt +'90 pg3200.txt +'90. pg3200.txt +'91 pg3200.txt +'91. pg3200.txt +'92. pg3200.txt +'93 pg3200.txt +'93, pg3200.txt +'93. pg3200.txt +'94 pg3200.txt +'94. pg3200.txt +'95. pg3200.txt +'96. pg3200.txt +'97. pg3200.txt +'98. pg3200.txt +'99 pg3200.txt +'99. pg3200.txt +'_patronized pg3200.txt +'_she_ pg31100.txt +'a pg3200.txt, pg100.txt +'a' pg3200.txt +'a? pg100.txt +'about pg3200.txt +'above pg3200.txt +'accost'? pg100.txt +'accurs'd pg100.txt +'address pg31100.txt +'affected.' pg100.txt +'affection's pg3200.txt +'ah, pg3200.txt +'alas!' pg100.txt +'alas'? pg100.txt +'all'?" pg3200.txt +'almost pg3200.txt +'alone.' pg100.txt +'alta-california', pg3200.txt +'amen' pg100.txt +'amen.' pg100.txt +'ammer, pg3200.txt +'an pg100.txt +'an' pg3200.txt +'and pg3200.txt +'angelus' pg3200.txt +'antony' pg100.txt +'anybody,' pg3200.txt +'arnest?" pg3200.txt +'as pg3200.txt +'as,' pg3200.txt +'as-is' pg31100.txt, pg3200.txt +'as-is,' pg100.txt +'assembly'; pg3200.txt +'astonishing!' pg3200.txt +'at pg3200.txt +'augusta.' pg31100.txt +'away!' pg3200.txt, pg100.txt +'ay' pg100.txt +'ay'; pg100.txt +'ay.' pg100.txt +'baa.' pg100.txt +'baby' pg100.txt +'bahsket,' pg3200.txt +'baker's pg3200.txt +'banished'? pg100.txt +'banished,' pg100.txt +'banishment' pg100.txt +'banishment,' pg100.txt +'banishment.' pg100.txt +'basket' pg3200.txt +'bearing pg3200.txt +'began pg3200.txt +'behold!' pg100.txt +'benedictus.' pg100.txt +'ber pg3200.txt +'bitte?' pg3200.txt +'blackguard!' pg3200.txt +'blatherskite!' 
pg3200.txt +'blown pg31100.txt +'blue pg3200.txt +'bobby pg3200.txt +'bolwoggoly'?" pg3200.txt +'bopple'?" pg3200.txt +'bopple,' pg3200.txt +'boston pg3200.txt +'bout pg3200.txt +'bout. pg3200.txt +'bout? pg3200.txt +'bout?" pg3200.txt +'boy'! pg100.txt +'break;' pg3200.txt +'brothel-daddy!' pg3200.txt +'brothel-knight!' pg3200.txt +'brother's' pg3200.txt +'brothers, pg3200.txt +'budget,' pg100.txt +'burgess.'" pg3200.txt +'burnt pg3200.txt +'business.' pg3200.txt +'but pg31100.txt, pg3200.txt +'but, pg3200.txt +'by pg100.txt +'cagion. pg100.txt +'canaille.' pg3200.txt +'canvassing.' pg3200.txt +'capable'; pg3200.txt +'casion pg3200.txt +'ceres.' pg100.txt +'cerns pg100.txt +'certain,' pg100.txt +'certainly.' pg3200.txt +'chapel':-- pg3200.txt +'charge!' pg3200.txt +'charge,' pg3200.txt +'cholee' pg3200.txt +'chronique pg3200.txt +'chuprassy'. pg3200.txt +'clime' pg3200.txt +'clubs!' pg100.txt +'combine' pg3200.txt +'come pg3200.txt +'confiscated'. pg3200.txt +'conversation' pg3200.txt +'conversation.' pg3200.txt +'convinced,' pg3200.txt +'coriolanus' pg100.txt +'court pg3200.txt +'cripple pg3200.txt +'crore'. pg3200.txt +'cross-garter'd?' pg100.txt +'crystal.' pg3200.txt +'cuckoo; pg100.txt +'cuspadores' pg3200.txt +'d pg3200.txt +'dam--nation! pg3200.txt +'death!' pg100.txt +'death'; pg100.txt +'deductions' pg3200.txt +'deerslayer,' pg3200.txt +'degrees.'" pg3200.txt +'delia,' pg3200.txt +'demonstrated pg3200.txt +'depend pg31100.txt +'dingblatter'? pg3200.txt +'diplomacy' pg3200.txt +'do pg31100.txt, pg3200.txt +'don't pg3200.txt +'doubt'; pg100.txt +'dout pg3200.txt +'dove'; pg100.txt +'drawing pg3200.txt +'drifting pg3200.txt +'ducdame'? pg100.txt +'dye pg3200.txt +'e.b.' pg3200.txt +'ease pg3200.txt +'eat! pg3200.txt +'eclipse's' pg3200.txt +'edgar.' pg100.txt +'em pg3200.txt, pg100.txt +'em! pg100.txt +'em!" pg3200.txt +'em, pg3200.txt, pg100.txt +'em- pg100.txt +'em--dey's--" pg3200.txt +'em. pg3200.txt, pg100.txt +'em." pg3200.txt +'em; pg3200.txt, pg100.txt +'em? pg3200.txt, pg100.txt +'em?" pg3200.txt +'england pg100.txt +'enterprise'. pg3200.txt +'establishment' pg3200.txt +'establishment'?" pg3200.txt +'exact' pg3200.txt +'excellent! pg100.txt +'explain.' pg3200.txt +'false pg3200.txt +'fanny' pg31100.txt +'fanny,' pg31100.txt +'farewell.' pg100.txt +'fastes'. pg3200.txt +'fasting, pg3200.txt +'fee pg3200.txt +'female pg3200.txt +'few,' pg31100.txt +'finesse.' pg3200.txt +'fire pg3200.txt +'fireside pg3200.txt +'for pg3200.txt, pg100.txt +'ford pg3200.txt +'fore pg3200.txt +'francis!' pg100.txt +'front! pg3200.txt +'gainst pg100.txt +'genre?'" pg3200.txt +'gentleman pg3200.txt +'gentleman'- pg100.txt +'gentlemen, pg3200.txt +'george! pg3200.txt +'get pg3200.txt +'gin." pg3200.txt +'git pg3200.txt +'globe- pg3200.txt +'gnillic'? pg3200.txt +'go pg3200.txt +'go,' pg100.txt +'god pg3200.txt +'goddamn.'" pg3200.txt +'golden- pg3200.txt +'gonderil,' pg3200.txt +'goneril.' pg100.txt +'good pg100.txt +'got pg3200.txt +'grace' pg100.txt +'grand pg3200.txt +'great--' pg3200.txt +'greeing, pg100.txt +'guns, pg100.txt +'h.' pg3200.txt +'had,' pg100.txt +'half pg3200.txt +'hamlet.' pg100.txt +'hard?' pg3200.txt +'have pg100.txt +'he pg3200.txt +'heartless.' pg3200.txt +'heeled,' pg3200.txt +'heigh-ho pg100.txt +'henry!'] pg100.txt +'here! pg3200.txt +'here, pg3200.txt +'hic pg100.txt +'hill.'" pg3200.txt +'history pg3200.txt +'ho!' pg100.txt +'hobby-horse'? pg100.txt +'hold pg3200.txt +'hornet' pg3200.txt +'hornet.' 
pg3200.txt +'how pg3200.txt +'hurricane' pg3200.txt +'husband'; pg100.txt +'i pg31100.txt, pg3200.txt +'i'; pg100.txt +'i,' pg100.txt +'i.' pg100.txt +'ice.' pg3200.txt +'if pg3200.txt, pg100.txt +'im?" pg3200.txt +'in pg31100.txt, pg3200.txt +'iniquitous pg3200.txt +'inprimis: pg100.txt +'isms?' pg3200.txt +'it pg31100.txt, pg3200.txt, pg100.txt +'it's pg3200.txt +'je pg3200.txt +'juliet.' pg100.txt +'kase pg3200.txt +'kept pg3200.txt +'king' pg3200.txt +'knife,' pg100.txt +'know' pg3200.txt +'l'homme pg3200.txt +'lady pg31100.txt +'lady,' pg3200.txt +'larum pg100.txt +'larum-bell? pg100.txt +'lately, pg31100.txt +'lead' pg3200.txt +'learn' pg3200.txt +'lection--" pg3200.txt +'legitimate'! pg100.txt +'let pg3200.txt +'life pg3200.txt +'like pg3200.txt +'little'! pg100.txt +'ll pg3200.txt +'long pg3200.txt +'longs pg100.txt +'longside pg3200.txt +'loo, pg100.txt +'looky pg3200.txt +'lordship.' pg100.txt +'love pg3200.txt +'love' pg100.txt +'lowin' pg3200.txt +'m pg3200.txt +'m'm pg3200.txt +'m." pg3200.txt +'m?" pg3200.txt +'made pg3200.txt +'mary!' pg3200.txt +'may.' pg100.txt +'menage,' pg3200.txt +'merely'?" pg3200.txt +'mid-afternoon.' pg3200.txt +'modify' pg3200.txt +'mongst pg3200.txt +'mortimer.' pg100.txt +'most pg3200.txt +'most." pg3200.txt +'mother' pg100.txt +'mother, pg3200.txt +'mud pg3200.txt +'mulberry pg3200.txt +'mum' pg100.txt +'n pg3200.txt +'n' pg3200.txt +'naked!' pg100.txt +'napoleon.' pg3200.txt +'naso' pg100.txt +'navigator,' pg3200.txt +'never pg3200.txt +'no pg3200.txt +'no' pg100.txt +'no, pg31100.txt, pg3200.txt +'no,' pg31100.txt, pg100.txt +'no--suicide.' pg3200.txt +'no.' pg3200.txt, pg100.txt +'no?'" pg31100.txt +'noddy.' pg100.txt +'nointed pg100.txt +'note pg3200.txt +'now pg3200.txt +'now,' pg3200.txt +'nub' pg3200.txt +'nub'-- pg3200.txt +'nuff pg3200.txt +'o pg3200.txt, pg100.txt +'o!' pg100.txt +'o, pg100.txt +'object pg3200.txt +'often.'" pg3200.txt +'oh, pg3200.txt +'oh,' pg3200.txt +'old; pg100.txt +'on pg3200.txt +'one pg3200.txt +'one, pg3200.txt +'one.' pg100.txt +'only pg3200.txt +'or pg100.txt +'outsider.' pg3200.txt +'p'r'aps pg3200.txt +'palaces.' pg3200.txt +'pan pg3200.txt +'pear pg3200.txt +'peared pg3200.txt +'pebbled pg3200.txt +'pedestrian.'" pg3200.txt +'pennsylvania' pg3200.txt +'pennsylvania'--the pg3200.txt +'pennsylvania.' pg3200.txt +'people pg3200.txt +'perceive pg100.txt +'pilot-town,' pg3200.txt +'plenty.' pg3200.txt +'point,' pg3200.txt +'policeman,' pg3200.txt +'posted.' pg3200.txt +'pound' pg31100.txt +'prentice-like. pg3200.txt +'prentices{1}--that pg3200.txt +'previous'!" pg3200.txt +'providence pg3200.txt +'purchase' pg3200.txt +'quick, pg3200.txt +'quickness pg3200.txt +'reach' pg3200.txt +'recherche'. pg3200.txt +'reckon.' pg3200.txt +'recognition pg3200.txt +'rickshas, pg3200.txt +'rising.' pg3200.txt +'robinson pg3200.txt +'romeo,' pg100.txt +'rossmore'?" pg3200.txt +'round pg3200.txt +'ruffler,' pg3200.txt +'s pg3200.txt +'s, pg100.txt +'s-t-e- pg3200.txt +'s. pg100.txt +'sallet' pg100.txt +'saul, pg3200.txt +'sblood, pg100.txt +'scape, pg100.txt +'scape-pipes pg3200.txt +'scape. pg100.txt +'scaped. pg100.txt +'scaped; pg100.txt +'schnawp'?" pg3200.txt +'scoundrel!' pg3200.txt +'sdeath! pg100.txt +'seems.' pg100.txt +'setting' pg3200.txt +'sh! pg3200.txt +'shall pg3200.txt +'shall'! pg100.txt +'shall'." pg3200.txt +'shall'; pg3200.txt +'shall'? pg100.txt +'shall,' pg100.txt +'she pg3200.txt +'sid pg3200.txt +'sir-reverence.' pg100.txt +'sir.' pg31100.txt, pg3200.txt +'sirrah'? 
pg100.txt +'sleep pg3200.txt +'snow.'" pg3200.txt +'so pg3200.txt +'some pg3200.txt, pg100.txt +'soul-moving'--interesting pg3200.txt +'spare pg100.txt +'spec pg3200.txt +'spec. pg3200.txt +'species pg3200.txt +'specimens'--quartz, pg3200.txt +'spect." pg3200.txt +'sport' pg3200.txt +'squire, pg31100.txt +'stand pg3200.txt +'stand!' pg100.txt +'statutes pg3200.txt +'stavely's pg3200.txt +'stead pg3200.txt +'stephen.' pg3200.txt +'still, pg3200.txt +'store' pg3200.txt +'straight pg3200.txt +'straightened pg3200.txt +'submit pg3200.txt +'suppings pg3200.txt +'supposing' pg3200.txt +'suspenso pg3200.txt +'t pg3200.txt, pg100.txt +'t! pg100.txt +'t, pg100.txt +'t. pg100.txt +'t? pg100.txt +'tain't pg3200.txt +'take pg3200.txt +'tell pg3200.txt +'tell's pg3200.txt +'tend pg3200.txt +'texas,' pg3200.txt +'that pg31100.txt, pg3200.txt +'that's pg3200.txt +'the pg31100.txt, pg3200.txt, pg100.txt +'then pg31100.txt, pg3200.txt +'there, pg3200.txt +'these pg3200.txt +'they pg31100.txt, pg3200.txt +'think.' pg3200.txt +'this pg3200.txt +'thortships,'--and pg3200.txt +'thraw'. pg3200.txt +'times-democrat' pg3200.txt +'tis pg3200.txt, pg100.txt +'tis, pg100.txt +'tis- pg100.txt +'tis. pg3200.txt, pg100.txt +'tis: pg100.txt +'tis; pg100.txt +'tisn't pg3200.txt +'to-morrow,' pg31100.txt +'tom pg3200.txt +'touch pg3200.txt +'troilus'? pg100.txt +'tu-who; pg100.txt +'tush, pg100.txt +'twa'n't pg3200.txt +'twain.'" pg3200.txt +'twas pg3200.txt, pg100.txt +'twere pg3200.txt, pg100.txt +'twere, pg100.txt +'twill pg100.txt +'twixt pg100.txt +'twouldn't pg3200.txt +'ud pg3200.txt +'um pg3200.txt +'unjust'! pg100.txt +'upper.' pg3200.txt +'us pg3200.txt +'uz pg3200.txt +'verily' pg100.txt +'verily,' pg100.txt +'voice' pg3200.txt +'void.' pg3200.txt +'wait pg3200.txt +'was pg31100.txt +'water pg3200.txt +'way pg3200.txt +'way. pg3200.txt +'we pg3200.txt, pg100.txt +'we'll pg3200.txt +'wear pg100.txt +'weather.'" pg3200.txt +'welcome!' pg100.txt +'welcome' pg100.txt +'well pg3200.txt +'well, pg31100.txt, pg3200.txt +'well,' pg3200.txt +'well--yes.' pg3200.txt +'well?' pg3200.txt +'were pg3200.txt +'what pg3200.txt +'what!' pg3200.txt +'what's pg3200.txt +'what-have-i- pg3200.txt +'wheat pg3200.txt +'when pg3200.txt, pg100.txt +'where pg3200.txt +'where, pg3200.txt +'wherefore pg3200.txt +'which pg100.txt +'white-sleeve pg3200.txt +'who pg3200.txt +'why pg3200.txt +'why, pg3200.txt +'why,' pg3200.txt +'why?' pg3200.txt +'widow' pg100.txt +'will', pg100.txt +'will.' pg100.txt +'win, pg3200.txt +'wit, pg100.txt +'wolf pg3200.txt +'wonder pg100.txt +'wonderfully pg3200.txt +'x.' pg3200.txt +'yaah!' pg3200.txt +'years' pg100.txt +'yes, pg3200.txt +'yes,' pg31100.txt, pg3200.txt +'yes.' pg3200.txt, pg100.txt +'you pg100.txt +'zerve pg3200.txt +("for pg3200.txt +("literature," pg3200.txt +($2 pg3200.txt +($600). pg3200.txt +('69 pg3200.txt +('the pg3200.txt +(*) pg100.txt +(. pg3200.txt +(11) pg3200.txt +(1588), pg3200.txt +(1803) pg31100.txt +(1814) pg31100.txt +(1818) pg31100.txt +(1867) pg3200.txt +(1868) pg3200.txt +(1877) pg3200.txt +(1879) pg3200.txt +(1935). pg3200.txt +(2) pg100.txt +(212-254-5093) pg100.txt +(4 pg3200.txt +(?) pg3200.txt +(a pg31100.txt, pg3200.txt +(a) pg3200.txt +(a, pg3200.txt +(abroad pg3200.txt +(according pg3200.txt +(added pg31100.txt +(admitted pg3200.txt +(after pg3200.txt +(almost pg31100.txt +(aloud.) 
pg3200.txt +(always pg31100.txt +(and pg31100.txt, pg3200.txt, pg100.txt +(anne pg31100.txt +(answered pg31100.txt +(apparently) pg3200.txt +(as pg31100.txt, pg3200.txt, pg100.txt +(aside.) pg3200.txt +(at pg3200.txt +(barrett) pg3200.txt +(barring pg3200.txt +(because pg3200.txt +(behind pg3200.txt +(between pg31100.txt +(boston). pg3200.txt +(buffalo). pg3200.txt +(but pg3200.txt +(by pg3200.txt +(c) pg3200.txt +(cal), pg3200.txt +(call pg3200.txt +(called pg3200.txt +(called) pg3200.txt +(characterless). pg3200.txt +(columbia pg3200.txt +(comparatively pg31100.txt +(copyrighted.); pg3200.txt +(correspondence) pg3200.txt +(cried pg31100.txt +(curtain.) pg3200.txt +(d) pg3200.txt +(dan pg3200.txt +(date) pg3200.txt +(descendants pg3200.txt +(dilworthy's) pg3200.txt +(dumb-show pg3200.txt +(dwell,) pg3200.txt +(editor pg3200.txt +(elle pg3200.txt +(elmira). pg3200.txt +(error pg3200.txt +(et pg3200.txt +(everybody pg3200.txt +(exactly pg3200.txt +(except pg3200.txt +(famous pg3200.txt +(fifty pg3200.txt +(fires?) pg3200.txt +(florence) pg3200.txt +(for pg31100.txt, pg3200.txt, pg100.txt +(forenoon) pg3200.txt +(from pg3200.txt +(given pg3200.txt +(glancing pg31100.txt +(gone)--is pg3200.txt +(got pg3200.txt +(great pg31100.txt, pg3200.txt +(hades),** pg3200.txt +(hair,) pg3200.txt +(hands pg3200.txt +(harpers). pg3200.txt +(hartford). pg3200.txt +(having pg3200.txt +(he pg31100.txt, pg3200.txt +(his pg100.txt +(holding pg31100.txt +(holy pg3200.txt +(i pg31100.txt, pg3200.txt +(i.e. pg3200.txt +(i.e., pg3200.txt +(if pg31100.txt, pg3200.txt +(immense pg3200.txt +(in pg31100.txt, pg3200.txt +(in-satin-and-silk-covered- pg3200.txt +(inasmuch pg3200.txt +(including pg3200.txt +(indefinite), pg3200.txt +(inscribed) pg3200.txt +(it pg3200.txt +(its pg3200.txt +(its) pg3200.txt +(jameson's) pg3200.txt +(joe pg3200.txt +(john pg3200.txt +(just pg3200.txt +(kansas pg3200.txt +(keokuk). pg3200.txt +(krugersdorp--the pg3200.txt +(last pg3200.txt +(laughing pg31100.txt +(laughter) pg3200.txt +(lausanne). pg3200.txt +(letters, pg3200.txt +(lioness) pg3200.txt +(liverpool). pg3200.txt +(london). pg3200.txt +(looking pg31100.txt +(m)ark. pg3200.txt +(maize), pg3200.txt +(marry, pg100.txt +(mighty pg3200.txt +(mighty-hunter-with-a-hog-eye-twain) pg3200.txt +(militarily) pg3200.txt +(muffled) pg100.txt +(name pg3200.txt +(native) pg3200.txt +(new pg3200.txt +(normandie) pg3200.txt +(not pg3200.txt, pg100.txt +(of pg3200.txt +(on pg3200.txt +(one pg3200.txt +(only pg3200.txt +(or pg3200.txt +(or, pg3200.txt +(pause--awed, pg3200.txt +(pause--thinking.) pg3200.txt +(pause.) pg3200.txt +(payable pg3200.txt +(perhaps pg31100.txt +(philadelphia). pg3200.txt +(pistols). pg3200.txt +(portland pg3200.txt +(pregnant pg3200.txt +(principally pg3200.txt +(private) pg3200.txt +(pronounced pg3200.txt +(provided pg3200.txt +(published pg3200.txt +(puts pg3200.txt +(quadrupling pg3200.txt +(refer pg3200.txt +(repeats. pg3200.txt +(replied pg31100.txt +(returning pg31100.txt +(roughly pg3200.txt +(sachseln) pg3200.txt +(said pg31100.txt, pg3200.txt +(sauce pg3200.txt +(save, pg31100.txt +(says pg31100.txt +(see pg3200.txt +(seventy- pg3200.txt +(several pg3200.txt +(she pg3200.txt +(shows pg3200.txt +(sic) pg3200.txt +(sic)... 
pg3200.txt +(signed pg3200.txt +(sings) pg100.txt +(some pg3200.txt +(somehow pg3200.txt +(song-bird), pg3200.txt +(speaking pg31100.txt +(suppressing pg3200.txt +(taking pg3200.txt +(technically pg3200.txt +(termed, pg3200.txt +(that pg3200.txt +(that's pg3200.txt +(the pg31100.txt, pg3200.txt +(there pg31100.txt, pg3200.txt +(they pg31100.txt, pg3200.txt +(think pg3200.txt +(this pg3200.txt +(tho' pg31100.txt +(though pg31100.txt, pg3200.txt +(three pg3200.txt +(tied pg3200.txt +(titter pg3200.txt +(to pg3200.txt +(turning pg31100.txt, pg3200.txt +(two pg3200.txt +(unasked pg31100.txt +(unfinished) pg3200.txt +(unfinished). pg3200.txt +(unfinished.) pg3200.txt +(unproduced). pg3200.txt +(unpublished) pg3200.txt +(unpublished). pg3200.txt +(unsigned). pg3200.txt +(usually pg3200.txt +(verbal) pg3200.txt +(vienna). pg3200.txt +(want pg3200.txt +(washington). pg3200.txt +(weib) pg3200.txt +(well, pg3200.txt +(when pg3200.txt +(where pg3200.txt +(whereto pg100.txt +(which pg3200.txt, pg100.txt +(who pg31100.txt, pg3200.txt +(who, pg3200.txt +(whose pg3200.txt +(width pg3200.txt +(winters) pg3200.txt +(with pg31100.txt, pg3200.txt +(within pg31100.txt +(without pg3200.txt +(wolf) pg3200.txt +(www.gutenberg.net), pg3200.txt +(www.gutenberg.org), pg31100.txt, pg100.txt +(you pg3200.txt +(your pg3200.txt +* pg31100.txt, pg3200.txt +** pg100.txt +*** pg31100.txt, pg3200.txt, pg100.txt +**** pg100.txt +***** pg31100.txt, pg3200.txt, pg100.txt +*either*: pg100.txt ++ pg3200.txt +- pg100.txt +--"and pg3200.txt +--- pg3200.txt +---- pg3200.txt +----' pg3200.txt +----'s pg31100.txt +----, pg3200.txt +-----(unfinished). pg3200.txt +------ pg3200.txt +---------- pg3200.txt +----------- pg3200.txt +------. pg3200.txt +------.' pg31100.txt +------said: pg3200.txt +----. pg3200.txt +----alas!--------alas!" pg3200.txt +----hotel. pg31100.txt +----shire pg31100.txt +---5, pg31100.txt +---?' pg3200.txt +--1838. pg3200.txt +--about pg3200.txt +--always?" pg3200.txt +--and pg3200.txt +--apologized. pg3200.txt +--because pg3200.txt +--but pg3200.txt +--chicken-heart!" pg3200.txt +--clothes. pg3200.txt +--come!" pg3200.txt +--degrade pg3200.txt +--ear.' pg3200.txt +--editor.] pg3200.txt +--electroincandescently--oligarcheologically--sanchrosynchro- pg3200.txt +--er--" pg3200.txt +--excommunication: pg3200.txt +--for pg3200.txt +--half-past--seven! pg3200.txt +--he! pg3200.txt +--hotel pg31100.txt +--i pg3200.txt +--knew pg3200.txt +--longingly: pg3200.txt +--look, pg3200.txt +--m.t. pg3200.txt +--m.t.] pg3200.txt +--march! pg3200.txt +--no pg3200.txt +--none pg3200.txt +--perhaps pg3200.txt +--polite pg3200.txt +--proofs.' pg3200.txt +--then pg3200.txt +--translator pg3200.txt +--translator. pg3200.txt +--youth. pg3200.txt +-and, pg100.txt +-here, pg100.txt +-matter? pg100.txt +-why pg100.txt +." pg31100.txt, pg3200.txt +.$6.00 pg3200.txt +... pg3200.txt +..." pg3200.txt +.... pg3200.txt +...." pg3200.txt +..... pg3200.txt +.................. pg3200.txt +.................... pg3200.txt +..................... pg3200.txt +...................... pg3200.txt +....................... pg3200.txt +......................... pg3200.txt +........................... pg3200.txt +............................$2.15 pg3200.txt +.............................. pg3200.txt +..........................34,952.00 pg3200.txt +.............18,104 pg3200.txt +............187.. 
pg3200.txt +.......63,053.68 pg3200.txt +..35,317.50 pg3200.txt +.02 pg3200.txt +.10,000 pg3200.txt +.15 pg3200.txt +.34 pg3200.txt +.47 pg3200.txt +.66 pg3200.txt +.75 pg3200.txt +.98 pg3200.txt +.we pg3200.txt +0. pg31100.txt, pg3200.txt +00{half} pg3200.txt +02 pg3200.txt +05{half} pg3200.txt +06 pg3200.txt +1 pg31100.txt, pg3200.txt, pg100.txt +1). pg3200.txt +1, pg3200.txt +1,000 pg3200.txt +1,000,000, pg3200.txt +1,050 pg3200.txt +1,100.00 pg3200.txt +1,150 pg3200.txt +1,204 pg3200.txt +1,204.00 pg3200.txt +1,500 pg3200.txt +1,500,000 pg3200.txt +1,500. pg3200.txt +1,600 pg3200.txt +1,719 pg3200.txt +1,750.00 pg3200.txt +1. pg3200.txt, pg100.txt +1.) pg3200.txt +1.25 pg3200.txt +1.50 pg3200.txt +1.66 pg3200.txt +1.e.1 pg31100.txt, pg3200.txt, pg100.txt +1.e.1. pg31100.txt, pg3200.txt, pg100.txt +1.e.8 pg31100.txt, pg3200.txt, pg100.txt +1.e.8. pg31100.txt, pg3200.txt, pg100.txt +1.e.9. pg31100.txt, pg3200.txt, pg100.txt +1.f. pg31100.txt, pg3200.txt, pg100.txt +1/2 pg3200.txt +10 pg31100.txt, pg3200.txt, pg100.txt +10). pg3200.txt +10, pg3200.txt +10--and pg3200.txt +10. pg3200.txt, pg100.txt +10." pg31100.txt +10.30. pg3200.txt +100 pg3200.txt, pg100.txt +100,000 pg3200.txt +100-millionaire pg3200.txt +100. pg3200.txt +100.00 pg3200.txt +1000 pg3200.txt +101 pg100.txt +102 pg100.txt +103 pg100.txt +103, pg3200.txt +103.' pg3200.txt +104 pg100.txt +105 pg100.txt +106 pg100.txt +107 pg100.txt +108 pg3200.txt, pg100.txt +109 pg100.txt +10:30, pg3200.txt +10; pg3200.txt +10] pg3200.txt +10]. pg3200.txt +10th pg31100.txt, pg3200.txt +10th. pg3200.txt +10th?" pg3200.txt +11 pg31100.txt, pg3200.txt, pg100.txt +11, pg3200.txt +11,000. pg3200.txt +11,654 pg3200.txt +11,882 pg3200.txt +11. pg3200.txt +11.) pg3200.txt +11.15 pg3200.txt +110 pg100.txt +111 pg3200.txt, pg100.txt +112 pg100.txt +113 pg100.txt +114 pg100.txt +115 pg100.txt +116 pg3200.txt, pg100.txt +117 pg100.txt +118 pg100.txt +119 pg3200.txt, pg100.txt +11th pg3200.txt +12 pg31100.txt, pg3200.txt, pg100.txt +12, pg3200.txt +12,000,000 pg3200.txt +12. pg3200.txt +12.) pg3200.txt +12.15. pg3200.txt +12.19. pg3200.txt +12.50. pg3200.txt +120 pg100.txt +1200 pg3200.txt +121 pg100.txt +122 pg100.txt +123 pg100.txt +124 pg3200.txt, pg100.txt +125 pg3200.txt, pg100.txt +126 pg100.txt +127 pg100.txt +128 pg100.txt +129 pg100.txt +12th pg3200.txt +12th, pg3200.txt +13 pg31100.txt, pg3200.txt, pg100.txt +13,000, pg3200.txt +13-18, pg3200.txt +13. pg3200.txt +13.) pg3200.txt +13.20 pg3200.txt +130 pg3200.txt, pg100.txt +131 pg100.txt +132 pg100.txt +133 pg100.txt +133,323.18 pg3200.txt +134 pg100.txt +135 pg100.txt +135. pg3200.txt +136 pg100.txt +137 pg100.txt +138 pg3200.txt, pg100.txt +138. pg3200.txt +139 pg100.txt +139. pg3200.txt +13th pg31100.txt, pg3200.txt +13th, pg3200.txt +14 pg31100.txt, pg3200.txt, pg100.txt +14, pg3200.txt +14,) pg3200.txt +14,000 pg3200.txt +14. pg3200.txt +14." pg3200.txt +14.) pg3200.txt +140 pg100.txt +141 pg100.txt +141. pg3200.txt +142 pg100.txt +142. pg3200.txt +143 pg100.txt +1431, pg3200.txt +144 pg100.txt +145 pg100.txt +1456. pg3200.txt +146 pg100.txt +1463 pg3200.txt +147 pg100.txt +148 pg100.txt +149 pg3200.txt, pg100.txt +1492. pg3200.txt +14th pg3200.txt +14th) pg3200.txt +14th). pg3200.txt +14th, pg3200.txt +14th. pg3200.txt +15 pg31100.txt, pg3200.txt, pg100.txt +15,000 pg3200.txt +15,51. pg3200.txt +15,762 pg3200.txt +15-puzzles pg3200.txt +15-year pg3200.txt +15. pg3200.txt +15.) pg3200.txt +150 pg3200.txt, pg100.txt +151 pg100.txt +152 pg100.txt +153 pg100.txt +1536. 
pg3200.txt +154 pg100.txt +1564. pg3200.txt +1589,' pg3200.txt +1591 pg100.txt +1592 pg100.txt +1593 pg100.txt +1594 pg100.txt +1595 pg100.txt +1596 pg100.txt +1597 pg100.txt +1598 pg100.txt +1599 pg100.txt +1599. pg3200.txt +15: pg3200.txt +15th pg3200.txt +15th. pg3200.txt +16 pg31100.txt, pg3200.txt, pg100.txt +16,000 pg3200.txt +16. pg3200.txt +16.) pg3200.txt +1600, pg3200.txt +1601 pg3200.txt, pg100.txt +1601. pg3200.txt +1601.] pg3200.txt +1601? pg3200.txt +1601] pg3200.txt +1602 pg100.txt +1602. pg3200.txt +1603 pg100.txt +1604 pg100.txt +1605 pg100.txt +1605. pg3200.txt +1606 pg100.txt +1607 pg100.txt +1608 pg100.txt +1609 pg100.txt +1609. pg3200.txt +1611 pg100.txt +1612 pg100.txt +1620--the pg3200.txt +1643. pg3200.txt +1665. pg3200.txt +1665.) pg3200.txt +1673. pg3200.txt +168 pg3200.txt +1680, pg3200.txt +1681. pg3200.txt +1682. pg3200.txt +1693, pg3200.txt +1693. pg3200.txt +16: pg3200.txt +16th pg31100.txt, pg3200.txt +16th, pg3200.txt +16th. pg3200.txt +17 pg31100.txt, pg3200.txt, pg100.txt +17, pg3200.txt +17,547 pg3200.txt +17. pg3200.txt +17.) pg3200.txt +1721: pg3200.txt +1722 pg3200.txt +1722. pg3200.txt +174. pg3200.txt +175. pg3200.txt +1776. pg3200.txt +1778." pg3200.txt +178 pg3200.txt +1783. pg3200.txt +1790. pg31100.txt +1791. pg31100.txt +1791." pg31100.txt +1792. pg31100.txt +1797-98, pg31100.txt +17; pg3200.txt +17th pg3200.txt +17th! pg3200.txt +17th. pg3200.txt +18 pg31100.txt, pg3200.txt, pg100.txt +18) pg3200.txt +18,206 pg3200.txt +18--. pg3200.txt +18. pg3200.txt +180 pg3200.txt +1803:-- pg31100.txt +1804. pg3200.txt +1807, pg3200.txt +1808, pg3200.txt +1810, pg3200.txt +1813 pg3200.txt +1814'." pg3200.txt +1814). pg3200.txt +1814. pg3200.txt +1814." pg3200.txt +1815. pg3200.txt +1819 pg3200.txt +1822-1888, pg3200.txt +1827, pg3200.txt +1830--he pg3200.txt +1831, pg3200.txt +1832! pg3200.txt +1832.] pg3200.txt +1834 pg3200.txt +1835-1910) pg3200.txt +1835. pg3200.txt +1836. pg3200.txt +1839 pg3200.txt +1840-7-8. pg3200.txt +1842. pg3200.txt +1844 pg3200.txt +1844. pg3200.txt +1845. pg3200.txt +1845." pg3200.txt +1847, pg3200.txt +1848,) pg3200.txt +1849.' pg3200.txt +185 pg3200.txt +1851, pg3200.txt +1851-1910 pg3200.txt +1851. pg3200.txt +1852-53. pg3200.txt +1853 pg3200.txt +1853-1866 pg3200.txt +1853-1910 pg3200.txt +1853. pg3200.txt +1853: pg3200.txt +1854, pg3200.txt +1855-56. pg3200.txt +1856. pg3200.txt +1857, pg3200.txt +1857. pg3200.txt +1858. pg3200.txt +1859. pg3200.txt +1859: pg3200.txt +1859]. pg3200.txt +186 pg3200.txt +186-." pg3200.txt +1860 pg3200.txt +1860, pg3200.txt +1860. pg3200.txt +1861 pg3200.txt +1861, pg3200.txt +1861. pg3200.txt +1861.) pg3200.txt +1862 pg3200.txt +1862, pg3200.txt +1862. pg3200.txt +1862: pg3200.txt +1863 pg3200.txt +1863, pg3200.txt +1863. pg3200.txt +1864, pg3200.txt +1864. pg3200.txt +1864] pg3200.txt +1865 pg3200.txt +1865" pg3200.txt +1865. pg3200.txt +1865.] pg3200.txt +1865] pg3200.txt +1866 pg3200.txt +1866, pg3200.txt +1866. pg3200.txt +1866.] pg3200.txt +1867 pg3200.txt +1867, pg3200.txt +1867-1875 pg3200.txt +1867. pg3200.txt +1867.] pg3200.txt +1868, pg3200.txt +1868-69. pg3200.txt +1868. pg3200.txt +1868.] pg3200.txt +1869-70. pg3200.txt +1869. pg3200.txt +1869.] pg3200.txt +1869] pg3200.txt +187-, pg3200.txt +1870 pg3200.txt +1870. pg3200.txt +1870.] pg3200.txt +1871 pg3200.txt +1871, pg3200.txt +1871-2 pg3200.txt +1871-72. pg3200.txt +1871. pg3200.txt +1871.] pg3200.txt +1872 pg3200.txt +1872. pg3200.txt +1872.] pg3200.txt +1873 pg3200.txt +1873, pg3200.txt +1873-74. pg3200.txt +1873. 
pg3200.txt +1873;--[this pg3200.txt +1874 pg3200.txt +1874. pg3200.txt +1874: pg3200.txt +1874] pg3200.txt +1875 pg3200.txt +1875, pg3200.txt +1875. pg3200.txt +1876 pg3200.txt +1876-1885 pg3200.txt +1876. pg3200.txt +1876] pg3200.txt +1877, pg3200.txt +1877. pg3200.txt +1878 pg3200.txt +1878. pg3200.txt +1879 pg3200.txt +1879, pg3200.txt +1879. pg3200.txt +1879.] pg3200.txt +1880 pg3200.txt +1880! pg3200.txt +1880, pg3200.txt +1880. pg3200.txt +1881 pg3200.txt +1881, pg3200.txt +1881. pg3200.txt +1882. pg3200.txt +1882.) pg3200.txt +1882.]} pg3200.txt +1883. pg3200.txt +1884. pg3200.txt +1885. pg3200.txt +1886 pg3200.txt +1886--along pg3200.txt +1886-1900 pg3200.txt +1886. pg3200.txt +1886: pg3200.txt +1887, pg3200.txt +1887. pg3200.txt +1888 pg3200.txt +1888, pg3200.txt +1888. pg3200.txt +1889 pg3200.txt +1889, pg3200.txt +1889. pg3200.txt +1889.] pg3200.txt +1890, pg3200.txt +1890. pg3200.txt +1891 pg3200.txt +1891. pg3200.txt +1891: pg3200.txt +1891] pg3200.txt +1892 pg3200.txt +1892. pg3200.txt +1893 pg3200.txt +1893. pg3200.txt +1894. pg3200.txt +1895 pg3200.txt +1895, pg3200.txt +1895. pg3200.txt +1896 pg31100.txt +1896, pg3200.txt +1896. pg3200.txt +1897 pg3200.txt +1897, pg3200.txt +1897. pg3200.txt +1897.] pg3200.txt +1898 pg3200.txt +1898. pg3200.txt +1899 pg3200.txt +1899. pg3200.txt +18th pg31100.txt, pg3200.txt +19 pg31100.txt, pg3200.txt, pg100.txt +19) pg3200.txt +19, pg3200.txt +19. pg3200.txt +190 pg3200.txt +1900 pg3200.txt +1900. pg3200.txt +1901 pg3200.txt +1901) pg3200.txt +1901-1906 pg3200.txt +1901. pg3200.txt +1902 pg31100.txt, pg3200.txt +1902, pg3200.txt +1902. pg3200.txt +1903. pg3200.txt +1904 pg3200.txt +1904. pg3200.txt +1905 pg3200.txt +1905. pg3200.txt +1906 pg3200.txt +1906, pg3200.txt +1906-07. pg3200.txt +1906. pg3200.txt +1907 pg3200.txt +1907, pg3200.txt +1907-1910 pg3200.txt +1907. pg3200.txt +1908 pg3200.txt +1908, pg3200.txt +1908. pg3200.txt +1909 pg3200.txt +1909, pg3200.txt +1909. pg3200.txt +1910 pg3200.txt +1910). pg3200.txt +1910. pg3200.txt +1916. pg3200.txt +1920 pg3200.txt +1994 pg100.txt +1: pg3200.txt +1; pg3200.txt +1? pg3200.txt +1] pg3200.txt +1st pg31100.txt, pg3200.txt +1st, pg31100.txt, pg3200.txt +1st. pg31100.txt, pg3200.txt +1st." pg3200.txt +2 pg31100.txt, pg3200.txt, pg100.txt +2!" pg3200.txt +2, pg3200.txt +2,'85. pg3200.txt +2,000 pg3200.txt +2,115,000. pg3200.txt +2,600 pg3200.txt +2,700 pg3200.txt +2,800 pg3200.txt +2. pg31100.txt, pg3200.txt, pg100.txt +2.) pg3200.txt +2.00 pg3200.txt +2.15. pg3200.txt +2.25 pg3200.txt +2.50 pg3200.txt +20 pg31100.txt, pg3200.txt, pg100.txt +20,000 pg3200.txt +20,000,000? pg3200.txt +20. pg3200.txt +20.) pg3200.txt +200 pg3200.txt +200,000 pg3200.txt +205. pg3200.txt +20th pg31100.txt, pg3200.txt +20th, pg3200.txt +20th--yes." pg3200.txt +20th. pg3200.txt +20th." pg3200.txt +21 pg31100.txt, pg3200.txt, pg100.txt +21,1866. pg3200.txt +21. pg3200.txt +21.) pg3200.txt +219.] pg31100.txt +21st; pg3200.txt +22 pg31100.txt, pg3200.txt, pg100.txt +22,) pg3200.txt +22,894. pg3200.txt +22. pg3200.txt +22.) pg3200.txt +226 pg3200.txt +23 pg31100.txt, pg3200.txt, pg100.txt +23, pg3200.txt +23,328 pg3200.txt +23. pg3200.txt +23.) pg3200.txt +23d, pg3200.txt +23rd pg31100.txt +24 pg31100.txt, pg3200.txt, pg100.txt +24) pg3200.txt +24, pg3200.txt +24-68. pg3200.txt +24. pg3200.txt +24.) pg3200.txt +246) pg3200.txt +246. pg3200.txt +24th pg3200.txt +25 pg31100.txt, pg3200.txt, pg100.txt +25, pg3200.txt +25. pg3200.txt +25.) 
pg3200.txt
+250, pg3200.txt
+250,000 pg3200.txt
+250.00 pg3200.txt
+25th--christmas pg3200.txt
+26 pg31100.txt, pg3200.txt, pg100.txt
[... several thousand index entries elided: one entry per added line, each a token followed by the comma-separated list of the input files (pg100.txt, pg3200.txt, pg31100.txt) that contain it; this stretch runs alphabetically from "250" through "altar" ...]
+altar pg3200.txt
+altar, pg31100.txt, pg3200.txt
+altar. pg3200.txt
+altar." pg31100.txt
+altar?
pg3200.txt +altars pg100.txt +altars. pg100.txt +alter pg31100.txt, pg3200.txt, pg100.txt +alter'd! pg100.txt +alter'd. pg100.txt +alter'd? pg100.txt +alter." pg31100.txt +alteration pg31100.txt, pg3200.txt, pg100.txt +alteration! pg100.txt +alteration, pg31100.txt, pg100.txt +alteration. pg31100.txt, pg100.txt +alteration." pg3200.txt +alteration; pg31100.txt +alterations pg31100.txt +altered pg31100.txt, pg3200.txt +altered, pg31100.txt +altered. pg3200.txt +altered." pg31100.txt, pg3200.txt +altered? pg31100.txt +altering pg31100.txt, pg3200.txt +alternate pg3200.txt +alternately pg3200.txt +alternately. pg3200.txt +alternating. pg3200.txt +alters. pg100.txt +alterthumswissenschaften. pg3200.txt +although pg31100.txt, pg3200.txt, pg100.txt +although, pg100.txt +altitude pg3200.txt, pg100.txt +altitude, pg3200.txt +altitude. pg3200.txt +altitudes pg3200.txt +altitudes, pg3200.txt +altitudes.' pg3200.txt +altogether pg31100.txt, pg3200.txt, pg100.txt +altogether, pg31100.txt, pg3200.txt +altogether--stands pg3200.txt +altogether--thought pg31100.txt +altogether. pg31100.txt, pg3200.txt, pg100.txt +altogether." pg31100.txt, pg3200.txt +altogether; pg3200.txt +alton, pg3200.txt, pg100.txt +alum pg3200.txt +aluminium; pg3200.txt +alway', pg3200.txt +alway, pg3200.txt +always pg31100.txt, pg3200.txt, pg100.txt +always!" pg3200.txt +always, pg3200.txt +always--" pg3200.txt +always--but pg31100.txt +always--flew pg3200.txt +always--there pg3200.txt +always--what pg31100.txt +always--wonder pg3200.txt +always. pg3200.txt +always." pg31100.txt, pg3200.txt +always: pg3200.txt +always; pg3200.txt +always?" pg3200.txt +alwuz pg3200.txt +am! pg31100.txt, pg3200.txt, pg100.txt +am, pg31100.txt, pg3200.txt, pg100.txt +am- pg100.txt +am--just--about--asleep-- pg3200.txt +am. pg31100.txt, pg3200.txt, pg100.txt +am." pg31100.txt, pg3200.txt +am.' pg3200.txt, pg100.txt +am: pg100.txt +am; pg31100.txt, pg3200.txt, pg100.txt +am? pg100.txt +am?" pg31100.txt +amain pg100.txt +amain, pg100.txt +amain. pg100.txt +amajuba, pg3200.txt +amaking, pg100.txt +amanuensis pg3200.txt +amanuensis, pg3200.txt +amaranth!" pg3200.txt +amaranth, pg3200.txt +amaranth. pg3200.txt +amateur pg31100.txt, pg3200.txt +amaz'd pg100.txt +amaz'd, pg100.txt +amaz'd. pg100.txt +amaz'd; pg100.txt +amaze pg100.txt +amazed pg31100.txt, pg3200.txt, pg100.txt +amazed),-- pg3200.txt +amazed, pg100.txt +amazed. pg3200.txt, pg100.txt +amazed; pg100.txt +amazed? pg3200.txt +amazedly, pg100.txt +amazedly? pg100.txt +amazement pg31100.txt, pg3200.txt +amazement! pg31100.txt +amazement) pg31100.txt +amazement, pg31100.txt, pg3200.txt, pg100.txt +amazement. pg31100.txt, pg3200.txt +amazement: pg3200.txt +amazement; pg31100.txt +amazes pg31100.txt, pg100.txt +amazeth. pg100.txt +amazing pg31100.txt, pg3200.txt +amazing!" pg3200.txt +amazing!' pg3200.txt +amazing. pg3200.txt +amazingly pg31100.txt, pg3200.txt +amazingly, pg31100.txt +amazingly--provided, pg3200.txt +amazingly. pg3200.txt +amazingly." pg31100.txt +amazon pg3200.txt +amazon, pg100.txt +amazon. pg3200.txt, pg100.txt +amazon: pg3200.txt +amazons pg100.txt +amazons, pg100.txt +amazons--this pg3200.txt +ambassador pg3200.txt, pg100.txt +ambassador! pg3200.txt +ambassador, pg3200.txt, pg100.txt +ambassador; pg100.txt +ambassadors pg100.txt +ambassadors, pg3200.txt, pg100.txt +ambassadors. pg3200.txt, pg100.txt +ambassadors: pg3200.txt +ambassadorship pg3200.txt +amber, pg3200.txt, pg100.txt +ambiguides. pg100.txt +ambiguities pg100.txt +ambition pg31100.txt, pg3200.txt, pg100.txt +ambition! 
pg100.txt +ambition, pg31100.txt, pg3200.txt, pg100.txt +ambition- pg100.txt +ambition. pg3200.txt +ambition: pg100.txt +ambition; pg3200.txt, pg100.txt +ambition? pg100.txt +ambitions pg3200.txt +ambitions, pg3200.txt +ambitions. pg3200.txt +ambitious pg31100.txt, pg3200.txt, pg100.txt +ambitious, pg31100.txt, pg3200.txt, pg100.txt +ambitious. pg100.txt +ambitious; pg100.txt +ambitious? pg100.txt +ambled pg100.txt +ambles pg100.txt +ambling. pg100.txt +ambo. pg100.txt +ambulance!" pg3200.txt +ambulance, pg3200.txt +ambulinia pg3200.txt +ambulinia! pg3200.txt +ambulinia, pg3200.txt +ambulinia," pg3200.txt +ambulinia-- pg3200.txt +ambulinia. pg3200.txt +ambulinia: pg3200.txt +ambulinia; pg3200.txt +ambuscade." pg3200.txt +ambuscade; pg3200.txt +ambuscades pg3200.txt +ambush pg3200.txt, pg100.txt +ambush. pg100.txt +ambushed pg3200.txt +ambushes, pg3200.txt +amelia pg31100.txt, pg3200.txt +amelia, pg3200.txt +amelia. pg3200.txt +amelia." pg31100.txt, pg3200.txt +amelia.) pg3200.txt +amelia? pg3200.txt +ameliorating pg3200.txt +amelioration pg3200.txt +amen pg3200.txt +amen! pg3200.txt, pg100.txt +amen, pg100.txt +amen. pg3200.txt, pg100.txt +amen." pg3200.txt +amen.' pg3200.txt +amen: pg3200.txt +amen; pg100.txt +amen? pg100.txt +amenable. pg31100.txt +amend pg3200.txt, pg100.txt +amend. pg100.txt +amend; pg100.txt +amended pg3200.txt +amended. pg100.txt +amending pg3200.txt +amendment pg31100.txt +amendment, pg31100.txt, pg100.txt +amendment. pg31100.txt +amendment? pg100.txt +amends pg31100.txt, pg3200.txt, pg100.txt +amends! pg100.txt +amends, pg100.txt +amends. pg3200.txt, pg100.txt +amends; pg100.txt +ament. pg3200.txt +america pg3200.txt +america! pg3200.txt +america!" pg3200.txt +america's pg3200.txt +america) pg3200.txt +america, pg3200.txt +america--" pg3200.txt +america--end pg3200.txt +america--i pg3200.txt +america. pg3200.txt +america." pg3200.txt +america: pg3200.txt +america; pg3200.txt +america?" pg3200.txt +america?' pg3200.txt +american pg3200.txt +american" pg3200.txt +american's pg3200.txt +american, pg3200.txt +american--roman pg3200.txt +american. pg3200.txt +american." pg3200.txt +americanism pg3200.txt +americanism. pg3200.txt +americanisms, pg3200.txt +americans pg3200.txt +americans). pg3200.txt +americans, pg3200.txt +americans. pg3200.txt +americans." pg3200.txt +americans; pg3200.txt +americans?" pg3200.txt +americans?'" pg3200.txt +americans] pg3200.txt +americay, pg3200.txt +amerikanischer pg3200.txt +amerikee." pg3200.txt +amerique. pg3200.txt +ames-ace pg100.txt +amiable pg31100.txt, pg3200.txt, pg100.txt +amiable, pg31100.txt +amiable--i pg31100.txt +amiable. pg31100.txt, pg100.txt +amiable." pg31100.txt +amiable?" pg31100.txt +amiableness pg31100.txt +amid pg31100.txt, pg3200.txt +amidships, pg3200.txt +amidships--both pg3200.txt +amidst pg31100.txt, pg3200.txt +amis pg100.txt +amis; pg100.txt +amiss pg31100.txt, pg3200.txt, pg100.txt +amiss, pg100.txt +amiss. pg3200.txt, pg100.txt +amiss: pg3200.txt, pg100.txt +amiss; pg100.txt +amiss? pg31100.txt, pg100.txt +amiss?" pg3200.txt +amities, pg100.txt +amity pg100.txt +amity, pg31100.txt, pg100.txt +amity. pg100.txt +ammunition pg3200.txt +ammunition's pg3200.txt +ammunition, pg3200.txt +ammunition-pocket, pg3200.txt +ammunition. pg3200.txt +ammunition." pg3200.txt +among pg31100.txt, pg3200.txt, pg100.txt +among's! pg100.txt +among. pg100.txt +amongst pg31100.txt, pg3200.txt, pg100.txt +amorous! pg100.txt +amorous, pg100.txt +amorous. pg100.txt +amort? pg100.txt +amount pg31100.txt, pg3200.txt +amount. 
pg31100.txt, pg3200.txt, pg100.txt +amount; pg3200.txt +amount? pg3200.txt +amount?" pg3200.txt +amounted pg31100.txt, pg3200.txt +amounting pg31100.txt, pg3200.txt +amounts pg3200.txt, pg100.txt +amphitheater; pg3200.txt +ample pg31100.txt, pg3200.txt +ample--it pg3200.txt +ample. pg3200.txt, pg100.txt +amplified; pg100.txt +amplify pg100.txt +amplitude pg3200.txt +amply pg3200.txt +amply, pg31100.txt +amputating, pg3200.txt +amputation, pg3200.txt +amsterdam pg3200.txt +amsterdam, pg3200.txt +amulets. pg3200.txt +amuse pg31100.txt, pg3200.txt +amuse, pg3200.txt +amuse; pg31100.txt +amused pg31100.txt, pg3200.txt +amused. pg31100.txt, pg3200.txt +amused; pg31100.txt +amusement pg31100.txt, pg3200.txt +amusement, pg31100.txt, pg3200.txt +amusement--a pg3200.txt +amusement. pg31100.txt, pg3200.txt +amusement." pg31100.txt, pg3200.txt +amusement.--much pg31100.txt +amusement.--she pg31100.txt +amusement?" pg3200.txt +amusement_] pg31100.txt +amusements pg31100.txt, pg3200.txt +amusements, pg31100.txt, pg3200.txt +amusements. pg31100.txt +amuses pg31100.txt, pg3200.txt +amusing pg31100.txt, pg3200.txt +amusing. pg31100.txt, pg3200.txt +amusing; pg31100.txt +amyntas, pg100.txt +an! pg3200.txt +an' pg3200.txt +an't pg31100.txt, pg100.txt +an't? pg100.txt +an, pg3200.txt, pg100.txt +an. pg3200.txt, pg100.txt +an; pg100.txt +an? pg3200.txt +anaesthetics." pg3200.txt +anaesthetics.' pg3200.txt +analogy pg31100.txt +analogy, pg31100.txt +analogy. pg3200.txt +analysis pg3200.txt +analysis. pg3200.txt +analyze pg3200.txt +ananias, pg3200.txt +ananias. pg3200.txt +anatomical pg3200.txt +anatomiz'd pg100.txt +anatomize pg100.txt +anatomy pg100.txt +anatomy. pg100.txt +anblick pg3200.txt +anblick--eine pg3200.txt +ancestor pg3200.txt, pg100.txt +ancestor!" pg3200.txt +ancestor, pg31100.txt, pg100.txt +ancestor. pg3200.txt, pg100.txt +ancestor; pg3200.txt +ancestors pg3200.txt +ancestors, pg3200.txt, pg100.txt +ancestors- pg100.txt +ancestors--the pg3200.txt +ancestors. pg3200.txt, pg100.txt +ancestors; pg3200.txt, pg100.txt +ancestral pg3200.txt +ancestry pg3200.txt, pg100.txt +ancestry, pg3200.txt, pg100.txt +anchor pg3200.txt +anchor, pg3200.txt +anchor. pg3200.txt +anchor? pg100.txt +anchorage, pg100.txt +anchored pg3200.txt +anchors, pg100.txt +anchors. pg3200.txt +ancient pg31100.txt, pg3200.txt, pg100.txt +ancient, pg3200.txt, pg100.txt +ancient. pg3200.txt, pg100.txt +ancient; pg100.txt +ancients pg3200.txt +and"-- pg3200.txt +and't pg100.txt +and, pg31100.txt, pg3200.txt, pg100.txt +and- pg100.txt +and-- pg3200.txt +and--" pg31100.txt, pg3200.txt +and--' pg3200.txt +and---- pg3200.txt +and----" pg3200.txt +and----' pg3200.txt +and--and pg31100.txt +and--and-- pg3200.txt +and--and--" pg3200.txt +and--and--can pg3200.txt +and--and--well, pg3200.txt +and--but pg3200.txt +and--we pg31100.txt +and--well, pg3200.txt +and--would pg3200.txt +and-passed-to-third-reading-and- pg3200.txt +andalusia pg3200.txt +andalusia, pg3200.txt +andalusian-born--" pg3200.txt +ande pg3200.txt +andeol. pg3200.txt +andersen pg3200.txt +anderson, pg31100.txt +anderson. pg31100.txt +anderson." pg31100.txt +anderthalbjahr. pg3200.txt +andes." pg3200.txt +andirons- pg100.txt +andren. pg100.txt +andrew pg3200.txt, pg100.txt +andrew! pg100.txt +andrew!" pg3200.txt +andrew. pg100.txt +andrew.' pg100.txt +andrew? pg100.txt +andrews pg3200.txt +andrews, pg31100.txt +andrews_. 
pg31100.txt +andromache pg100.txt +andronici pg100.txt +andronicus pg100.txt +andronicus, pg100.txt +andronicus," pg3200.txt +andronicus- pg100.txt +andronicus. pg100.txt +andronicus: pg100.txt +andronicus; pg100.txt +andronicus? pg100.txt +andy. pg3200.txt +andy?" pg3200.txt +anecdote pg31100.txt, pg3200.txt +anecdote, pg3200.txt +anecdote. pg3200.txt +anecdote: pg3200.txt +anecdote; pg3200.txt +anecdotes pg31100.txt, pg3200.txt +anecdotes-- pg3200.txt +anecdotes. pg3200.txt +anecdotes." pg3200.txt +aneroid, pg3200.txt +anew pg31100.txt, pg3200.txt, pg100.txt +anew, pg100.txt +anew. pg3200.txt +anew.- pg100.txt +anew; pg100.txt +ang'red pg100.txt +angel pg31100.txt, pg3200.txt, pg100.txt +angel!" pg3200.txt +angel's pg3200.txt +angel's, pg3200.txt +angel's." pg3200.txt +angel, pg31100.txt, pg3200.txt +angel--an pg3200.txt +angel. pg31100.txt, pg3200.txt, pg100.txt +angel." pg3200.txt +angel; pg3200.txt, pg100.txt +angelic pg3200.txt +angelica; pg100.txt +angelical! pg100.txt +angelico--" pg3200.txt +angelo pg3200.txt, pg100.txt +angelo! pg100.txt +angelo!" pg3200.txt +angelo's pg3200.txt +angelo, pg3200.txt, pg100.txt +angelo- pg100.txt +angelo--that pg3200.txt +angelo. pg3200.txt, pg100.txt +angelo." pg3200.txt +angelo.] pg3200.txt +angelo; pg3200.txt, pg100.txt +angelo? pg100.txt +angelo?" pg3200.txt +angels pg3200.txt, pg100.txt +angels, pg3200.txt +angels--and pg3200.txt +angels. pg3200.txt, pg100.txt +angels." pg3200.txt +anger pg31100.txt, pg3200.txt, pg100.txt +anger, pg31100.txt, pg3200.txt, pg100.txt +anger. pg31100.txt, pg3200.txt, pg100.txt +anger." pg31100.txt +anger; pg100.txt +anger? pg100.txt +angered pg3200.txt +angerly. pg100.txt +angers pg100.txt +anges? pg100.txt +angiers pg100.txt +anglais. pg100.txt +anglais? pg100.txt +angle pg3200.txt +angler pg100.txt +angles pg3200.txt +angles, pg3200.txt +angles; pg3200.txt +angliae pg100.txt +anglish. pg100.txt +anglo-saxon pg3200.txt +angrily-- pg3200.txt +angrily. pg3200.txt +angrily: pg3200.txt +angrily; pg100.txt +angry pg31100.txt, pg3200.txt, pg100.txt +angry, pg31100.txt, pg3200.txt, pg100.txt +angry. pg31100.txt, pg100.txt +angry." pg31100.txt, pg3200.txt +angry; pg3200.txt, pg100.txt +angry? pg100.txt +angry?" pg31100.txt +anguish pg3200.txt +anguish!" pg3200.txt +anguish, pg3200.txt +anguish-- pg3200.txt +anguish. pg3200.txt, pg100.txt +anguish; pg100.txt +anguished pg3200.txt +angular pg3200.txt +angus, pg100.txt +angus. pg100.txt +anhalt," pg31100.txt +anhalt." pg31100.txt +anhalt? pg31100.txt +animal pg3200.txt +animal's, pg3200.txt +animal, pg3200.txt +animal--adam pg3200.txt +animal--possibly pg3200.txt +animal. pg3200.txt +animal." pg31100.txt +animal; pg3200.txt +animal?" pg3200.txt +animals pg3200.txt, pg100.txt +animals, pg3200.txt +animals--but pg3200.txt +animals--they pg3200.txt +animals. pg3200.txt +animals." pg3200.txt +animals; pg3200.txt +animals?" pg3200.txt +animate pg31100.txt, pg3200.txt +animated pg31100.txt, pg3200.txt +animating pg31100.txt +animating. pg31100.txt +animation pg31100.txt, pg3200.txt +animation!" pg3200.txt +animation, pg3200.txt +animation-- pg3200.txt +animation--but pg31100.txt +animation--full pg3200.txt +animation. pg3200.txt +animation: pg3200.txt +animation; pg31100.txt +animosities? pg3200.txt +animosity pg3200.txt +anitus, pg3200.txt +anjou pg100.txt +anjou, pg100.txt +ankle pg3200.txt +ankle-bones, pg3200.txt +ankle; pg3200.txt, pg100.txt +ankles pg3200.txt +ankles! pg3200.txt +ankles. pg3200.txt +ankles." pg31100.txt +ann, pg3200.txt +ann." 
pg3200.txt +anna pg3200.txt +annals pg3200.txt +annamaria pg31100.txt +anne pg31100.txt, pg3200.txt, pg100.txt +anne! pg100.txt +anne's pg31100.txt +anne, pg31100.txt, pg100.txt +anne- pg100.txt +anne--but pg31100.txt +anne. pg31100.txt, pg100.txt +anne." pg31100.txt +anne; pg31100.txt, pg100.txt +anne? pg31100.txt, pg100.txt +anne?" pg31100.txt +annesley, pg31100.txt +annex pg3200.txt +annex, pg3200.txt +annex.) pg3200.txt +annexed pg31100.txt +anni. pg3200.txt +annie pg3200.txt +annie! pg3200.txt +annie's, pg3200.txt +annie, pg3200.txt +annie--$20. pg3200.txt +annie. pg3200.txt +annie? pg3200.txt +annihilate pg3200.txt +annihilate; pg3200.txt +annihilation pg31100.txt +annihilation. pg31100.txt +anniversary pg3200.txt +annothanize pg100.txt +announce pg31100.txt, pg3200.txt +announced pg31100.txt, pg3200.txt +announced, pg3200.txt +announced. pg31100.txt, pg3200.txt +announced." pg3200.txt +announced: pg3200.txt +announcement pg3200.txt +announcement: pg3200.txt +announcing pg31100.txt +announcing: pg3200.txt +annoy! pg100.txt +annoy. pg100.txt +annoy? pg100.txt +annoyance pg3200.txt +annoyance, pg3200.txt, pg100.txt +annoyance-- pg3200.txt +annoyance. pg3200.txt +annoyances pg3200.txt +annoyances, pg3200.txt +annoyances. pg3200.txt +annoyed pg3200.txt +annoyed, pg3200.txt +annoyed. pg3200.txt +annoying pg3200.txt +annoying. pg3200.txt +annoying; pg3200.txt +annual pg3200.txt +annually pg3200.txt +annually, pg3200.txt +annually-- pg3200.txt +annuities pg31100.txt +annuities, pg31100.txt +annullable pg3200.txt +annulled pg3200.txt +annulled. pg3200.txt +annulled; pg3200.txt +anodyne pg3200.txt +anointed pg3200.txt, pg100.txt +anointed. pg3200.txt, pg100.txt +anon pg3200.txt, pg100.txt +anon! pg100.txt +anon, pg3200.txt, pg100.txt +anon- pg100.txt +anon. pg100.txt +anon." pg3200.txt +anon; pg100.txt +anon? pg100.txt +anonymous pg3200.txt +another pg31100.txt, pg3200.txt, pg100.txt +another! pg100.txt +another's pg100.txt +another's. pg31100.txt, pg3200.txt +another, pg31100.txt, pg3200.txt, pg100.txt +another," pg31100.txt +another- pg100.txt +another-- pg3200.txt +another--a pg3200.txt +another--and pg31100.txt +another--climbed pg3200.txt +another--he pg31100.txt +another--stones pg3200.txt +another--the pg3200.txt +another-partly pg3200.txt +another. pg31100.txt, pg3200.txt, pg100.txt +another." pg31100.txt, pg3200.txt +another.' pg3200.txt +another.) pg3200.txt +another: pg3200.txt +another; pg31100.txt, pg3200.txt, pg100.txt +another? pg31100.txt, pg3200.txt, pg100.txt +another?" pg3200.txt +another_ pg31100.txt +anson pg3200.txt +answer pg31100.txt, pg3200.txt, pg100.txt +answer! pg3200.txt, pg100.txt +answer!" pg3200.txt +answer'd pg100.txt +answer'd! pg100.txt +answer'd, pg100.txt +answer'd- pg100.txt +answer'd. pg100.txt +answer'd; pg100.txt +answer'd? pg100.txt +answer, pg31100.txt, pg3200.txt, pg100.txt +answer-- pg3200.txt +answer--"and pg31100.txt +answer--none. pg100.txt +answer--nothing pg3200.txt +answer. pg31100.txt, pg3200.txt, pg100.txt +answer." pg31100.txt, pg3200.txt, pg100.txt +answer: pg31100.txt, pg3200.txt, pg100.txt +answer; pg31100.txt, pg3200.txt +answer? pg31100.txt, pg3200.txt, pg100.txt +answer?" pg3200.txt +answer?' pg3200.txt +answer] pg100.txt +answerable pg31100.txt, pg3200.txt +answerable." pg31100.txt +answered pg31100.txt, pg3200.txt +answered, pg31100.txt, pg3200.txt, pg100.txt +answered-- pg31100.txt, pg3200.txt +answered--'no.' pg3200.txt +answered. pg31100.txt, pg3200.txt, pg100.txt +answered." 
pg31100.txt +answered: pg31100.txt, pg3200.txt +answered; pg3200.txt, pg100.txt +answered? pg100.txt +answerest pg100.txt +answering pg31100.txt, pg3200.txt, pg100.txt +answering, pg31100.txt +answering. pg3200.txt +answers pg31100.txt, pg3200.txt, pg100.txt +answers, pg100.txt +answers. pg3200.txt, pg100.txt +answers." pg3200.txt +answers: pg100.txt +answers; pg100.txt +ant pg3200.txt +ant, pg3200.txt, pg100.txt +ant-- pg3200.txt +ant-hill: pg3200.txt +antagonist pg31100.txt, pg3200.txt +ante pg3200.txt +ante-room, pg3200.txt +antedated pg3200.txt +antedates pg3200.txt +antelope pg3200.txt +antelopes pg3200.txt +antennae pg3200.txt +antenor pg100.txt +antenor! pg100.txt +antenor, pg100.txt +anterior pg3200.txt +anteroom pg3200.txt +anthem pg3200.txt +anthems. pg100.txt +anthology pg31100.txt +anthony-- pg100.txt +anthracite, pg3200.txt +anthracite. pg3200.txt +anti-mormon pg3200.txt +anti-profanity pg3200.txt +anti-rummies, pg3200.txt +antiates pg100.txt +antiates, pg100.txt +antiates; pg100.txt +antic pg3200.txt, pg100.txt +antic, pg100.txt +antichambers." pg31100.txt +anticipate pg3200.txt, pg100.txt +anticipated pg31100.txt, pg3200.txt +anticipated. pg31100.txt +anticipated; pg31100.txt +anticipation pg31100.txt +anticipation, pg31100.txt +anticipation. pg3200.txt +anticipations pg31100.txt +anticipations, pg3200.txt +anticipations. pg3200.txt +anticipatory pg3200.txt +antics pg100.txt +antidote pg31100.txt, pg3200.txt, pg100.txt +antidote," pg3200.txt +antigonus pg100.txt +antigonus, pg100.txt +antigua pg31100.txt +antigua, pg31100.txt +antigua. pg31100.txt +antimony pg3200.txt +antiopa? pg100.txt +antipathy pg100.txt +antipholus pg100.txt +antipholus! pg100.txt +antipholus, pg100.txt +antipholus. pg100.txt +antipholus; pg100.txt +antipholus? pg100.txt +antipholuses pg100.txt +antipodes pg3200.txt, pg100.txt +antipodes, pg100.txt +antipodes. pg100.txt +antique pg3200.txt +antique, pg3200.txt +antiques, pg3200.txt +antiquities pg3200.txt +antiquities, pg3200.txt +antiquities. pg3200.txt +antiquity pg3200.txt +antiquity, pg100.txt +antiquity. pg3200.txt +antiquity." pg3200.txt +antium pg100.txt +antium. pg100.txt +antium? pg100.txt +antoine pg3200.txt +antoinette: pg3200.txt +antonette." pg3200.txt +antonio pg100.txt +antonio! pg100.txt +antonio's; pg100.txt +antonio, pg100.txt +antonio- pg100.txt +antonio. pg100.txt +antonio.] pg100.txt +antonio; pg100.txt +antonio? pg100.txt +antonio] pg100.txt +antonio]. pg100.txt +antonius. pg100.txt +antony pg3200.txt, pg100.txt +antony! pg100.txt +antony!' pg100.txt +antony's. pg100.txt +antony, pg100.txt +antony- pg100.txt +antony--and pg3200.txt +antony. pg100.txt +antony.' pg100.txt +antony; pg100.txt +antony? pg100.txt +ants pg3200.txt +ants. pg3200.txt +antwerp's pg31100.txt +antwerp." pg3200.txt +anvil pg3200.txt +anvil! pg3200.txt +anxieties pg3200.txt +anxieties, pg3200.txt +anxiety pg31100.txt, pg3200.txt +anxiety, pg31100.txt, pg3200.txt +anxiety--he pg31100.txt +anxiety. pg31100.txt, pg3200.txt +anxiety: pg3200.txt +anxiety; pg3200.txt +anxiety? pg3200.txt +anxious pg31100.txt, pg3200.txt +anxious, pg31100.txt, pg3200.txt +anxious. pg31100.txt +anxious; pg3200.txt +anxiously pg31100.txt, pg3200.txt +anxiously, pg3200.txt +anxiously. pg3200.txt +anxiously: pg3200.txt +anxiously; pg31100.txt +any!" pg3200.txt +any, pg3200.txt, pg100.txt +any--" pg3200.txt +any--and pg3200.txt +any--i pg3200.txt +any--shall pg3200.txt +any--which pg3200.txt +any-how. pg3200.txt +any. pg31100.txt, pg3200.txt, pg100.txt +any." pg31100.txt, pg3200.txt +any.' 
pg3200.txt +any..... pg3200.txt +any; pg31100.txt, pg3200.txt, pg100.txt +any? pg3200.txt, pg100.txt +any?" pg3200.txt +anybody pg31100.txt, pg3200.txt, pg100.txt +anybody!" pg31100.txt +anybody's pg3200.txt +anybody's. pg3200.txt +anybody's." pg31100.txt +anybody, pg31100.txt, pg3200.txt +anybody--he pg3200.txt +anybody. pg31100.txt, pg3200.txt +anybody." pg31100.txt, pg3200.txt +anybody.' pg3200.txt +anybody; pg31100.txt, pg3200.txt +anybody?" pg31100.txt, pg3200.txt +anyhow pg31100.txt +anyhow, pg3200.txt +anyhow. pg3200.txt +anyhow." pg3200.txt +anyhow.' pg3200.txt +anyhow; pg3200.txt +anyhow?" pg3200.txt +anymore pg3200.txt +anymore. pg3200.txt +anyone pg31100.txt, pg3200.txt, pg100.txt +anyone, pg31100.txt +anyone. pg3200.txt +anyone." pg31100.txt, pg3200.txt +anything pg31100.txt, pg3200.txt, pg100.txt +anything! pg3200.txt +anything!" pg3200.txt +anything!' pg3200.txt +anything, pg31100.txt, pg3200.txt, pg100.txt +anything," pg3200.txt +anything--" pg31100.txt, pg3200.txt +anything--anything pg3200.txt +anything. pg31100.txt, pg3200.txt, pg100.txt +anything." pg31100.txt, pg3200.txt +anything.' pg3200.txt +anything: pg3200.txt +anything; pg31100.txt, pg3200.txt, pg100.txt +anything? pg3200.txt, pg100.txt +anything?" pg31100.txt, pg3200.txt +anything?' pg3200.txt +anyway pg3200.txt +anyway!" pg3200.txt +anyway, pg3200.txt +anyway--' pg3200.txt +anyway--and, pg3200.txt +anyway--i pg3200.txt +anyway. pg3200.txt +anyway." pg3200.txt +anyway.' pg3200.txt +anyway; pg3200.txt +anyway? pg3200.txt +anyway?" pg3200.txt +anyway?' pg3200.txt +anywhere pg31100.txt, pg3200.txt +anywhere! pg3200.txt +anywhere, pg31100.txt, pg3200.txt +anywhere. pg31100.txt, pg3200.txt, pg100.txt +anywhere." pg31100.txt, pg3200.txt +anywhere.' pg3200.txt +anywhere.--perry pg31100.txt +anywhere; pg3200.txt, pg100.txt +anywhere? pg3200.txt +anywhere?" pg3200.txt +anywhere?' pg3200.txt +anywheres. pg3200.txt +anywheres." pg3200.txt +anywise pg3200.txt +anzeiger pg3200.txt +apace pg3200.txt, pg100.txt +apace, pg100.txt +apace--what pg3200.txt +apace. pg100.txt +apace.' pg100.txt +apace.- pg100.txt +apace; pg100.txt +apaches, pg3200.txt +apart pg31100.txt, pg3200.txt, pg100.txt +apart' pg3200.txt +apart, pg3200.txt, pg100.txt +apart--as pg3200.txt +apart. pg3200.txt, pg100.txt +apart." pg31100.txt, pg3200.txt +apart.' pg3200.txt +apart; pg3200.txt, pg100.txt +apart?' pg3200.txt +apart] pg100.txt +apartment pg31100.txt, pg3200.txt +apartment! pg31100.txt +apartment, pg31100.txt, pg3200.txt +apartment. pg31100.txt, pg3200.txt +apartments pg31100.txt, pg3200.txt, pg100.txt +apartments, pg31100.txt, pg3200.txt +apartments--conveyed pg3200.txt +apartments. pg31100.txt +apartments." pg3200.txt +apathy pg3200.txt +ape! pg100.txt +ape, pg100.txt +ape. pg100.txt +apemantus pg100.txt +apemantus! pg100.txt +apemantus. pg100.txt +apemantus? pg100.txt +apennines, pg100.txt +apes pg100.txt +apex pg3200.txt +apiece pg3200.txt, pg100.txt +apiece!" pg3200.txt +apiece, pg3200.txt +apiece--and pg3200.txt +apiece. pg3200.txt +apiece." pg31100.txt, pg3200.txt +apiece.' pg3200.txt +apish, pg100.txt +apish. pg100.txt +aplenty pg3200.txt +apocalypse pg3200.txt +apocalypse; pg3200.txt +apocryphal. pg3200.txt +apocryphal." pg3200.txt +apodictical pg3200.txt +apollinem.' pg100.txt +apollo pg100.txt +apollo! pg100.txt +apollo- pg100.txt +apollo. pg100.txt +apologetically. pg3200.txt +apologetically: pg3200.txt +apologies pg31100.txt +apologies. pg3200.txt +apologise pg31100.txt +apologise." 
pg31100.txt +apologised pg31100.txt +apologising pg31100.txt, pg3200.txt +apologising, pg31100.txt +apologize pg3200.txt +apologize, pg31100.txt +apologize; pg3200.txt +apologizes pg3200.txt +apologizing pg3200.txt +apology pg31100.txt, pg3200.txt, pg100.txt +apology, pg31100.txt, pg3200.txt +apology-- pg3200.txt +apology. pg100.txt +apology." pg31100.txt +apology: pg100.txt +apology? pg100.txt +apology?" pg3200.txt +apoplexy. pg100.txt +apostle pg3200.txt +apostle. pg3200.txt +apostles, pg3200.txt +apostles; pg100.txt +apostrophe: pg3200.txt +apostrophising pg3200.txt +apothecary pg31100.txt, pg100.txt +apothecary! pg100.txt +apothecary, pg100.txt +apothecary. pg31100.txt, pg100.txt +appall'd. pg100.txt +appalled pg3200.txt +appalled! pg3200.txt +appalled; pg3200.txt +appalling pg3200.txt +apparatus pg3200.txt +apparatus, pg3200.txt +apparatus. pg3200.txt +apparel pg3200.txt, pg100.txt +apparel, pg3200.txt, pg100.txt +apparel. pg100.txt +appareled pg3200.txt +apparell'd pg100.txt +apparell'd, pg100.txt +apparent pg31100.txt, pg3200.txt, pg100.txt +apparent, pg31100.txt, pg3200.txt, pg100.txt +apparent. pg31100.txt, pg100.txt +apparent; pg31100.txt +apparent? pg100.txt +apparently pg31100.txt, pg3200.txt +apparently, pg3200.txt +apparently--and pg3200.txt +apparently. pg3200.txt, pg100.txt +apparition pg3200.txt +apparition--a pg3200.txt +apparition. pg100.txt +apparitions pg3200.txt, pg100.txt +apparitions, pg3200.txt +apparitions?" pg3200.txt +appeach'd. pg100.txt +appeal pg31100.txt, pg3200.txt, pg100.txt +appeal'd, pg100.txt +appeal, pg31100.txt, pg100.txt +appeal. pg31100.txt, pg100.txt +appeal." pg3200.txt +appeal; pg31100.txt +appeal] pg3200.txt +appealed pg31100.txt, pg3200.txt +appealing pg3200.txt +appealingly, pg3200.txt +appealingly. pg3200.txt +appealingly: pg3200.txt +appeals pg3200.txt +appeals, pg3200.txt +appeals. pg3200.txt +appear pg31100.txt, pg3200.txt, pg100.txt +appear! pg100.txt +appear'd pg100.txt +appear'd, pg100.txt +appear'd? pg100.txt +appear, pg3200.txt, pg100.txt +appear. pg31100.txt, pg3200.txt, pg100.txt +appear." pg31100.txt +appear: pg3200.txt, pg100.txt +appear; pg31100.txt, pg100.txt +appear?" pg3200.txt +appearance pg31100.txt, pg3200.txt, pg100.txt +appearance! pg31100.txt +appearance" pg3200.txt +appearance, pg31100.txt, pg3200.txt, pg100.txt +appearance. pg31100.txt, pg3200.txt +appearance." pg3200.txt +appearance; pg31100.txt, pg3200.txt +appearance? pg100.txt +appearances pg31100.txt +appearances, pg3200.txt +appeared pg31100.txt, pg3200.txt, pg100.txt +appeared, pg31100.txt, pg3200.txt +appeared. pg31100.txt, pg3200.txt, pg100.txt +appeared; pg31100.txt +appearing pg31100.txt, pg3200.txt, pg100.txt +appears pg31100.txt, pg3200.txt, pg100.txt +appears, pg3200.txt, pg100.txt +appears- pg100.txt +appears. pg3200.txt, pg100.txt +appears; pg100.txt +appeas'd pg100.txt +appeas'd! pg100.txt +appeas'd. pg100.txt +appease pg3200.txt +appease; pg100.txt +appeased pg3200.txt, pg100.txt +appeased. pg3200.txt +appeases pg3200.txt +appeasing pg3200.txt +appele? pg100.txt +appellant, pg100.txt +appellants, pg100.txt +appellation pg31100.txt +appelles pg3200.txt +appelles, pg3200.txt +appendages pg31100.txt +appended pg3200.txt +appended, pg3200.txt +appendicitis. pg3200.txt +appending pg3200.txt +appendix pg3200.txt +appendix-basket) pg3200.txt +appendix. 
pg3200.txt, pg100.txt +appenines, pg3200.txt +appertinent pg100.txt +appertinents pg100.txt +appetite pg3200.txt, pg100.txt +appetite" pg3200.txt +appetite, pg31100.txt, pg3200.txt, pg100.txt +appetite- pg100.txt +appetite-compelling pg3200.txt +appetite-cure pg3200.txt +appetite-cure, pg3200.txt +appetite. pg3200.txt, pg100.txt +appetite; pg31100.txt, pg100.txt +appetiteless, pg3200.txt +appetites pg3200.txt, pg100.txt +appetites. pg3200.txt +appetizing pg3200.txt +appian pg3200.txt +appinted. pg3200.txt +applaud pg3200.txt +applaud! pg100.txt +applaud, pg3200.txt +applauded pg3200.txt +applauded! pg3200.txt +applauded, pg3200.txt +applauded." pg3200.txt +applauded; pg3200.txt +applauding pg3200.txt +applauding. pg3200.txt +applauding?" pg3200.txt +applause pg3200.txt, pg100.txt +applause! pg100.txt +applause)." pg3200.txt +applause, pg3200.txt +applause. pg3200.txt +applause." pg3200.txt +applause.) pg3200.txt +applause.] pg3200.txt +applause; pg3200.txt, pg100.txt +applause? pg3200.txt +applause]--is pg3200.txt +applauses pg3200.txt, pg100.txt +apple pg3200.txt +apple, pg3200.txt +apple-john. pg100.txt +apple-tarts. pg31100.txt +apple. pg3200.txt, pg100.txt +apple." pg3200.txt +apples pg3200.txt +apples! pg100.txt +apples!" pg31100.txt +apples, pg3200.txt +apples--in pg3200.txt +apples. pg31100.txt +apples; pg3200.txt +appletart? pg100.txt +appliance pg100.txt +appliance, pg100.txt +applicable" pg3200.txt +applicant's pg3200.txt +applicant." pg3200.txt +applicant; pg3200.txt +applicants pg3200.txt +applicants, pg3200.txt +applicants. pg3200.txt +application pg31100.txt, pg3200.txt +application, pg31100.txt, pg3200.txt +application. pg3200.txt +application; pg3200.txt +applications pg31100.txt +applications), pg31100.txt +applications; pg31100.txt +applied pg31100.txt, pg3200.txt, pg100.txt +applied, pg31100.txt +applied. pg31100.txt, pg3200.txt +applied: pg3200.txt +applies pg31100.txt, pg3200.txt +apply pg31100.txt, pg3200.txt, pg100.txt +apply, pg3200.txt +apply. pg3200.txt, pg100.txt +apply." pg31100.txt, pg3200.txt +applying pg31100.txt, pg3200.txt +appoint pg3200.txt, pg100.txt +appoint" pg3200.txt +appoint. pg3200.txt +appoint? pg100.txt +appointed pg31100.txt, pg3200.txt, pg100.txt +appointed, pg100.txt +appointed. pg3200.txt +appointed? pg100.txt +appointed?" pg3200.txt +appointing pg3200.txt +appointment pg31100.txt, pg3200.txt +appointment, pg31100.txt, pg3200.txt, pg100.txt +appointment. pg31100.txt, pg3200.txt, pg100.txt +appointments pg3200.txt +appointments! pg3200.txt +appointments, pg31100.txt +appointments. pg3200.txt, pg100.txt +appomattox-well. pg3200.txt +apportioned pg3200.txt +appreciate pg3200.txt +appreciate--" pg3200.txt +appreciate." pg3200.txt +appreciated pg31100.txt, pg3200.txt +appreciated--it pg3200.txt +appreciated. pg3200.txt +appreciation pg3200.txt +appreciation, pg3200.txt +appreciation. pg3200.txt +appreciations pg3200.txt +appreciatively pg3200.txt +apprehend pg100.txt +apprehend, pg31100.txt +apprehend. pg31100.txt +apprehended. pg31100.txt, pg100.txt +apprehended; pg31100.txt +apprehending pg31100.txt +apprehension pg31100.txt, pg3200.txt, pg100.txt +apprehension, pg31100.txt, pg3200.txt +apprehension-- pg3200.txt +apprehension--enough pg3200.txt +apprehension. pg31100.txt, pg3200.txt, pg100.txt +apprehension: pg3200.txt +apprehension; pg3200.txt, pg100.txt +apprehension? pg100.txt +apprehensions pg31100.txt, pg100.txt +apprehensions. pg3200.txt, pg100.txt +apprehensive pg31100.txt +apprehensive. 
pg3200.txt +apprehensive; pg100.txt +apprendre pg3200.txt +apprentice pg3200.txt +apprentice's pg3200.txt +apprentice. pg3200.txt +apprenticed pg3200.txt +apprenticehood pg100.txt +apprentices pg3200.txt +apprentices, pg3200.txt +apprenticeship pg3200.txt +apprenticeship, pg3200.txt +apprenticeship. pg3200.txt +apprising pg3200.txt +approach pg31100.txt, pg3200.txt, pg100.txt +approach'd pg100.txt +approach'd. pg100.txt +approach, pg31100.txt, pg3200.txt, pg100.txt +approach--except pg3200.txt +approach. pg3200.txt, pg100.txt +approach." pg31100.txt +approach; pg100.txt +approached pg31100.txt, pg3200.txt +approached, pg31100.txt, pg3200.txt +approaches pg3200.txt, pg100.txt +approaches, pg3200.txt +approaches. pg3200.txt, pg100.txt +approaches; pg100.txt +approacheth. pg100.txt +approaching pg31100.txt, pg3200.txt +approaching, pg31100.txt +approaching. pg31100.txt, pg3200.txt +approaching; pg31100.txt, pg3200.txt +approbation pg31100.txt, pg3200.txt, pg100.txt +approbation, pg31100.txt, pg3200.txt, pg100.txt +approbation. pg31100.txt, pg3200.txt, pg100.txt +approbation; pg100.txt +approof, pg100.txt +approof. pg100.txt +appropriate pg31100.txt, pg3200.txt +appropriate, pg3200.txt +appropriated pg31100.txt, pg3200.txt +appropriation pg3200.txt, pg100.txt +appropriation. pg3200.txt +appropriation?" pg3200.txt +appropriation?--if pg3200.txt +appropriations." pg3200.txt +approv'd, pg100.txt +approval pg3200.txt +approval, pg3200.txt +approval--a pg3200.txt +approval. pg3200.txt +approve pg31100.txt, pg3200.txt, pg100.txt +approve, pg3200.txt, pg100.txt +approve-" pg100.txt +approve. pg3200.txt +approved pg31100.txt, pg3200.txt, pg100.txt +approved. pg31100.txt +approves pg100.txt +approving pg3200.txt +approvingly. pg3200.txt +approximate pg3200.txt +approximated, pg3200.txt +approximately pg3200.txt +approximations, pg3200.txt +appurtenances pg3200.txt +apr.7,'95. pg3200.txt +apricocks, pg100.txt +apricots pg3200.txt +april pg31100.txt, pg3200.txt +april, pg3200.txt +april. pg3200.txt, pg100.txt +april.) pg3200.txt +aprons-- pg3200.txt +aprons. pg100.txt +apt pg31100.txt, pg3200.txt, pg100.txt +apt, pg100.txt +apt. pg100.txt +apt: pg100.txt +apt; pg100.txt +apt? pg100.txt +aptitude pg3200.txt +aptness pg100.txt +aptnesses, pg3200.txt +aqua-vitx. pg100.txt +aqueducts pg3200.txt +aquilon'd. pg100.txt +aquitaine pg100.txt +aquitaine, pg100.txt +aquitaine. pg100.txt +aquitaine; pg100.txt +arab pg3200.txt +arab, pg3200.txt +arab. pg3200.txt +arab.' pg3200.txt +arabesques, pg3200.txt +arabia pg100.txt +arabia. pg3200.txt +arabia? pg3200.txt +arabian pg3200.txt +arabic pg3200.txt +arabic." pg3200.txt +arabic?" pg3200.txt +arabs pg3200.txt +arabs, pg3200.txt +araminta pg3200.txt +ararat pg3200.txt +arathea, pg3200.txt +arbitrarily pg3200.txt +arbitrary pg3200.txt +arbitrate pg100.txt +arbitrate. pg100.txt +arbitrating pg100.txt +arbitrement pg100.txt +arbitrement, pg100.txt +arbour, pg31100.txt, pg100.txt +arbour.] pg100.txt +arbour]. pg100.txt +arc pg3200.txt, pg100.txt +arc's pg3200.txt +arc, pg3200.txt, pg100.txt +arc--come!" pg3200.txt +arc. pg3200.txt +arc." pg3200.txt +arc; pg3200.txt +arc? pg3200.txt +arc]. pg3200.txt +arch pg31100.txt, pg3200.txt, pg100.txt +arch, pg3200.txt +arch-enemy pg100.txt +arch-heretic, pg100.txt +arch-mock, pg100.txt +archangel pg3200.txt +archangel!" pg3200.txt +archangel. pg3200.txt +archangel.' 
pg3200.txt +archangel; pg3200.txt +archangels pg3200.txt +archangels, pg3200.txt +archbishop pg3200.txt, pg100.txt +archbishop's pg3200.txt +archbishop's, pg100.txt +archbishop, pg100.txt +archbishop. pg100.txt +archbishop; pg100.txt +archduchesses pg3200.txt +arched pg3200.txt +archelaus pg100.txt +archers; pg100.txt +archery, pg100.txt +archery; pg100.txt +arches pg3200.txt +arches, pg31100.txt, pg3200.txt +archibald! pg31100.txt +archibald, pg100.txt +archidamus pg100.txt +archipelago pg3200.txt +archipelago. pg3200.txt +archipelagoes pg3200.txt +architect pg3200.txt +architect, pg3200.txt +architect. pg3200.txt +architects pg3200.txt +architectural pg3200.txt +architecture pg31100.txt, pg3200.txt +architecture's. pg3200.txt +architecture, pg3200.txt +architecture--great pg3200.txt +architecture. pg3200.txt +archive pg31100.txt, pg3200.txt, pg100.txt +archives pg3200.txt +archives. pg3200.txt +archness pg31100.txt +archway, pg31100.txt +archy. pg3200.txt +arctic pg3200.txt +arctics pg3200.txt +arcu.' pg100.txt +arde- pg100.txt +arden pg100.txt +arden. pg100.txt +ardent pg31100.txt, pg3200.txt +ardent, pg31100.txt +ardently pg31100.txt +ardor pg3200.txt +ardour, pg31100.txt +arduous pg3200.txt +are! pg31100.txt, pg3200.txt, pg100.txt +are!" pg31100.txt, pg3200.txt +are!"-- pg31100.txt +are!' pg3200.txt +are"-- pg31100.txt +are) pg31100.txt +are, pg31100.txt, pg3200.txt, pg100.txt +are--" pg31100.txt, pg3200.txt +are--cowards--and pg3200.txt +are--i'll pg3200.txt +are--it pg3200.txt +are--perhaps pg31100.txt +are--safe. pg3200.txt +are--then pg3200.txt +are--they pg3200.txt +are--you pg3200.txt +are. pg31100.txt, pg3200.txt, pg100.txt +are." pg31100.txt, pg3200.txt +are.' pg3200.txt +are: pg3200.txt, pg100.txt +are; pg31100.txt, pg3200.txt, pg100.txt +are? pg3200.txt, pg100.txt +are?" pg31100.txt, pg3200.txt +area pg3200.txt +areas pg3200.txt +areas. pg3200.txt +aren't pg3200.txt +arena pg3200.txt +argal, pg100.txt +argenteuil pg3200.txt +argentie`re. pg3200.txt +argier pg100.txt +argier. pg100.txt +argosies pg100.txt +argosy pg100.txt +argosy? pg100.txt +argot pg3200.txt +argue pg31100.txt, pg3200.txt +argue, pg3200.txt +argue--and pg3200.txt +argue. pg3200.txt +argued pg3200.txt +argued; pg3200.txt +argues pg100.txt +arguing pg31100.txt +arguing. pg100.txt +argument pg31100.txt, pg3200.txt, pg100.txt +argument, pg31100.txt, pg3200.txt, pg100.txt +argument- pg100.txt +argument--none!" pg3200.txt +argument. pg3200.txt, pg100.txt +argument..... pg3200.txt +argument: pg100.txt +argument; pg3200.txt, pg100.txt +argument? pg100.txt +argument?" pg31100.txt +argumentative, pg3200.txt +argumentatively pg3200.txt +argumentatively: pg3200.txt +arguments pg31100.txt, pg3200.txt, pg100.txt +arguments, pg31100.txt, pg3200.txt +arguments. pg3200.txt +arguments; pg100.txt +arguments? pg100.txt +argus; pg100.txt +argyle pg31100.txt +arid pg3200.txt +ariel pg100.txt +ariel! pg100.txt +ariel, pg100.txt +ariel; pg100.txt +ariel] pg100.txt +aright pg31100.txt, pg100.txt +aright, pg100.txt +aright. pg100.txt +aright." pg3200.txt +aright: pg100.txt +aright? pg3200.txt, pg100.txt +aright?" pg3200.txt +arise pg31100.txt, pg100.txt +arise! pg100.txt +arise, pg31100.txt, pg3200.txt, pg100.txt +arise," pg31100.txt +arise--but pg31100.txt +arise. pg3200.txt +arise." pg31100.txt +arise; pg100.txt +arisen pg31100.txt, pg3200.txt +arises, pg100.txt +ariseth] pg100.txt +arising pg31100.txt, pg3200.txt, pg100.txt +arising, pg31100.txt +aristocracies pg3200.txt +aristocracies, pg3200.txt +aristocracies. 
pg3200.txt +aristocracy pg3200.txt +aristocracy, pg3200.txt +aristocracy. pg3200.txt +aristocracy; pg3200.txt +aristocratic pg3200.txt +aristocratic, pg3200.txt +aristocratically pg3200.txt +aristocrats pg3200.txt +aristocrats, pg3200.txt +aristotle." pg3200.txt +arithmetic pg3200.txt, pg100.txt +arithmetic, pg100.txt +arithmetician, pg100.txt +arizona." pg3200.txt +arizona; pg3200.txt +ark pg3200.txt +ark, pg3200.txt +ark; pg3200.txt +arkansas pg3200.txt +arkansas, pg3200.txt +arkansas; pg3200.txt +arkansaw!" pg3200.txt +arkansaw. pg3200.txt +arkwright's pg3200.txt +arkwright, pg3200.txt +arles, pg3200.txt +arles. pg3200.txt +arlington, pg3200.txt +arm pg31100.txt, pg3200.txt, pg100.txt +arm! pg100.txt +arm'd pg100.txt +arm'd, pg100.txt +arm'd. pg100.txt +arm'd? pg100.txt +arm's pg3200.txt +arm, pg31100.txt, pg3200.txt, pg100.txt +arm," pg3200.txt +arm--no pg3200.txt +arm-chair. pg3200.txt +arm-chairs. pg3200.txt +arm-joints, pg3200.txt +arm-pits. pg3200.txt +arm. pg3200.txt, pg100.txt +arm." pg31100.txt, pg3200.txt +arm; pg3200.txt, pg100.txt +arm? pg100.txt +arm?" pg3200.txt +arm] pg100.txt +armado pg100.txt +armado. pg100.txt +armado.' pg100.txt +armagh pg3200.txt +armagnac pg3200.txt +armagnac? pg100.txt +armaments pg3200.txt +armed pg3200.txt, pg100.txt +armed, pg3200.txt +armed. pg3200.txt +armed.' pg3200.txt +armenia pg100.txt +armenia, pg100.txt +armerry pg3200.txt +armful pg3200.txt +armies pg3200.txt, pg100.txt +armies! pg3200.txt +armies, pg3200.txt, pg100.txt +armies. pg3200.txt +armies; pg3200.txt +armies? pg100.txt +armies?" pg3200.txt +arming! pg3200.txt +arming. pg3200.txt +armor pg3200.txt +armor!--and pg3200.txt +armor, pg3200.txt +armor. pg3200.txt, pg100.txt +armor." pg3200.txt +armor: pg3200.txt +armor; pg3200.txt +armor? pg3200.txt +armory pg3200.txt +armour pg100.txt +armour. pg100.txt +armour; pg3200.txt +armour? pg100.txt +armourer pg100.txt +armourer, pg100.txt +armourer; pg100.txt +armoury pg100.txt +armoury. pg100.txt +armpit pg3200.txt +armpits pg3200.txt +armpits, pg3200.txt +arms pg31100.txt, pg3200.txt, pg100.txt +arms! pg3200.txt, pg100.txt +arms!" pg3200.txt +arms, pg31100.txt, pg3200.txt, pg100.txt +arms,` pg3200.txt +arms--he pg3200.txt +arms. pg31100.txt, pg3200.txt, pg100.txt +arms." pg3200.txt +arms.) pg3200.txt +arms; pg100.txt +arms? pg3200.txt, pg100.txt +arms] pg100.txt +army pg3200.txt, pg100.txt +army, pg3200.txt, pg100.txt +army--and pg3200.txt +army--men pg3200.txt +army--rather pg3200.txt +army. pg3200.txt, pg100.txt +army." pg3200.txt +army.)--exchange. pg3200.txt +army; pg100.txt +army? pg3200.txt, pg100.txt +army?" pg3200.txt +arno. pg3200.txt +arnold pg3200.txt +arnot pg3200.txt +aromatic pg3200.txt +aromatic, pg3200.txt +aromatic--in pg3200.txt +aroostook." pg3200.txt +arose pg31100.txt, pg3200.txt, pg100.txt +arose, pg31100.txt +arose. pg31100.txt, pg3200.txt, pg100.txt +aroun' pg3200.txt +aroun', pg3200.txt +aroun'." pg3200.txt +around pg31100.txt, pg3200.txt +around! pg31100.txt, pg3200.txt +around) pg3200.txt +around, pg3200.txt +around--but pg3200.txt +around--especially pg3200.txt +around--paris pg3200.txt +around. pg31100.txt, pg3200.txt +around." pg3200.txt +around: pg3200.txt +around; pg3200.txt +around?" pg3200.txt +around?' pg3200.txt +arouse pg3200.txt +aroused pg3200.txt +aroused, pg3200.txt +arragon, pg100.txt +arragon. pg100.txt +arraign pg3200.txt, pg100.txt +arraigns pg3200.txt +arrange pg31100.txt, pg3200.txt +arranged pg31100.txt, pg3200.txt +arranged, pg31100.txt +arranged--sometimes pg31100.txt +arranged. 
pg31100.txt, pg3200.txt +arranged." pg3200.txt +arrangement pg31100.txt, pg3200.txt +arrangement, pg31100.txt, pg3200.txt +arrangement. pg31100.txt, pg3200.txt +arrangements pg31100.txt, pg3200.txt +arrangements, pg31100.txt, pg3200.txt +arrangements--somehow pg3200.txt +arrangements. pg31100.txt, pg3200.txt +arranging pg31100.txt, pg3200.txt +arras pg100.txt +arras. pg100.txt +arras.] pg100.txt +array pg3200.txt +array'd? pg100.txt +array, pg3200.txt, pg100.txt +array,-- pg3200.txt +array. pg3200.txt, pg100.txt +array; pg100.txt +arrayed pg3200.txt +arrayed, pg3200.txt +arrearages, pg100.txt +arrest pg3200.txt +arrest, pg3200.txt, pg100.txt +arrest: pg100.txt +arrested pg31100.txt, pg3200.txt +arrested, pg3200.txt +arrests pg100.txt +arriv'd pg100.txt +arriv'd, pg100.txt +arriv'd. pg100.txt +arriv'd; pg100.txt +arriv'd? pg100.txt +arrival pg31100.txt, pg3200.txt +arrival!" pg31100.txt +arrival, pg31100.txt, pg3200.txt +arrival. pg31100.txt, pg3200.txt +arrival; pg31100.txt +arrivals. pg31100.txt +arrivals." pg31100.txt +arrivance. pg100.txt +arrive pg3200.txt, pg100.txt +arrive. pg3200.txt +arrive; pg3200.txt +arrive?" pg3200.txt +arrived pg31100.txt, pg3200.txt +arrived, pg31100.txt, pg3200.txt +arrived--the pg3200.txt +arrived. pg31100.txt, pg3200.txt, pg100.txt +arrived." pg31100.txt, pg3200.txt +arrived; pg3200.txt +arrived? pg3200.txt +arrived?" pg3200.txt +arrives pg3200.txt +arriving pg3200.txt, pg100.txt +arriving, pg31100.txt +arriving; pg3200.txt +arriving? pg3200.txt +arrogance pg100.txt +arrogance. pg100.txt +arrogance? pg100.txt +arrogant pg3200.txt +arrogant. pg3200.txt +arrow pg3200.txt +arrow, pg3200.txt +arrow-heads pg3200.txt +arrow-slit pg3200.txt +arrow. pg3200.txt +arrows pg3200.txt +arrows, pg3200.txt, pg100.txt +arrows] pg100.txt +arrowy pg3200.txt +arsenal. pg3200.txt +arsenic!" pg3200.txt +arson pg3200.txt +arsons. pg3200.txt +art pg31100.txt, pg3200.txt, pg100.txt +art! pg3200.txt, pg100.txt +art) pg100.txt +art, pg3200.txt, pg100.txt +art,-- pg3200.txt +art-critics pg3200.txt +art. pg31100.txt, pg3200.txt, pg100.txt +art." pg31100.txt, pg3200.txt +art: pg100.txt +art; pg3200.txt, pg100.txt +art? pg3200.txt, pg100.txt +artemidorus." pg100.txt +artemus pg3200.txt +artemus: pg3200.txt +arteries, pg100.txt +artery pg3200.txt +artery, pg3200.txt +artexnidorus pg3200.txt +artful pg31100.txt, pg3200.txt +arthur pg3200.txt, pg100.txt +arthur's pg3200.txt +arthur's." pg3200.txt +arthur, pg3200.txt, pg100.txt +arthur-orton-mary-baker-thompson-eddy-louis-the-seventeenth-veiled- pg3200.txt +arthur. pg3200.txt, pg100.txt +arthur." pg3200.txt +arthur; pg3200.txt +article pg31100.txt, pg3200.txt, pg100.txt +article, pg31100.txt, pg3200.txt, pg100.txt +article--as pg3200.txt +article. pg3200.txt, pg100.txt +article." pg3200.txt +article: pg100.txt +article? pg3200.txt +article?' pg3200.txt +articles pg31100.txt, pg3200.txt, pg100.txt +articles, pg3200.txt +articles. pg3200.txt, pg100.txt +articles: pg3200.txt +articles; pg100.txt +articles? pg100.txt +articulate pg100.txt +articulate, pg100.txt +articulation: pg3200.txt +articulations. pg3200.txt +artificer pg100.txt +artifices pg3200.txt +artificial pg3200.txt +artificial, pg3200.txt +artificialities, pg3200.txt +artificialities; pg3200.txt +artificiality pg3200.txt +artikel. pg3200.txt +artillery pg3200.txt, pg100.txt +artillery! pg3200.txt +artillery, pg3200.txt, pg100.txt +artillery. pg3200.txt +artillery; pg3200.txt +artisans. pg3200.txt +artist pg31100.txt, pg3200.txt +artist, pg3200.txt +artist--all pg3200.txt +artist. 
pg31100.txt, pg3200.txt
+[... index entries for tokens "artist;" through "bertram's." omitted: each added output line pairs one lower-cased, whitespace-delimited token with the comma-separated list of the input files (pg100.txt, pg31100.txt, pg3200.txt) that contain it, e.g. "+artist; pg3200.txt" and "+as, pg31100.txt, pg3200.txt, pg100.txt"; the raw listing resumes after the sketch below ...]
pg100.txt +bertram, pg31100.txt, pg100.txt +bertram,' pg31100.txt +bertram--she pg31100.txt +bertram. pg31100.txt, pg100.txt +bertram.' pg100.txt +bertram? pg31100.txt +bertram] pg100.txt +bertrams pg31100.txt +bertrams, pg31100.txt +bertrand. pg3200.txt +bertrand." pg3200.txt +bertrand?" pg3200.txt +besant, pg3200.txt +besant. pg3200.txt +beseech pg3200.txt, pg100.txt +beseech'd, pg100.txt +beseech, pg100.txt +beseech. pg100.txt +beseeched pg3200.txt +beseeched, pg100.txt +beseeches pg3200.txt +beseeching pg3200.txt +beseechingly: pg3200.txt +beseem pg100.txt +beseems pg100.txt +beseen, pg3200.txt +beset pg31100.txt, pg3200.txt +beset! pg100.txt +beset. pg100.txt +beset." pg3200.txt +beset; pg100.txt +beside pg3200.txt, pg100.txt +beside, pg100.txt +beside. pg31100.txt, pg3200.txt, pg100.txt +beside." pg3200.txt +beside; pg100.txt +beside? pg100.txt +besides pg31100.txt, pg3200.txt, pg100.txt +besides, pg31100.txt, pg3200.txt, pg100.txt +besides--" pg3200.txt +besides. pg31100.txt, pg3200.txt +besides." pg3200.txt +besides; pg3200.txt +besides?" pg3200.txt +besieg'd, pg100.txt +besieg'd- pg100.txt +besieg'd; pg100.txt +besieged pg3200.txt +besmear'd pg100.txt +besmirch pg100.txt +besmirch'd pg100.txt +besom pg100.txt +besort pg100.txt +besought pg3200.txt +bespeak pg31100.txt +bespeak. pg100.txt +bespeak: pg100.txt +bespeaks pg3200.txt +bespoke. pg100.txt +bessemer pg3200.txt +best pg31100.txt, pg3200.txt, pg100.txt +best! pg100.txt +best, pg31100.txt, pg3200.txt, pg100.txt +best,' pg100.txt +best--pilots pg3200.txt +best-hated pg3200.txt +best-laid pg3200.txt +best. pg31100.txt, pg3200.txt, pg100.txt +best." pg31100.txt, pg3200.txt +best.' pg3200.txt +best: pg100.txt +best; pg31100.txt, pg3200.txt, pg100.txt +best? pg100.txt +best?" pg3200.txt +bested, pg100.txt +bestialities pg3200.txt +bestow pg31100.txt, pg3200.txt, pg100.txt +bestow!--how pg31100.txt +bestow'd pg100.txt +bestow'd! pg100.txt +bestow'd. pg100.txt +bestow'd; pg100.txt +bestow'd? pg100.txt +bestow'st, pg100.txt +bestow, pg31100.txt, pg100.txt +bestow- pg100.txt +bestow. pg31100.txt, pg100.txt +bestow." pg31100.txt +bestowed pg31100.txt, pg3200.txt, pg100.txt +bestowed! pg100.txt +bestowed, pg100.txt +bestowed; pg31100.txt +bestowing pg31100.txt, pg100.txt +bestowing, pg100.txt +bestows pg100.txt +bestows. pg3200.txt +bestrew pg100.txt +bestrid pg100.txt +bestrid, pg100.txt +bet pg3200.txt +bet! pg3200.txt +bet!" pg3200.txt +bet. pg3200.txt +bet." pg3200.txt +bet.' pg3200.txt +betake pg3200.txt +betaught pg3200.txt +bete-la; pg3200.txt +bethel, pg3200.txt +bethesda. pg3200.txt +bethink pg100.txt +bethlehem pg3200.txt +bethlehem, pg3200.txt +bethlehem--church pg3200.txt +bethought pg3200.txt, pg100.txt +bethought! pg100.txt +bethought!" pg3200.txt +bethsaida pg3200.txt +betid; pg100.txt +betide pg100.txt +betide. pg100.txt +betideth pg100.txt +betime, pg100.txt +betime; pg100.txt +betimes pg100.txt +betimes, pg100.txt +betimes. pg100.txt +betimes; pg31100.txt, pg100.txt +betoken pg100.txt +betook pg100.txt +betray pg31100.txt, pg3200.txt, pg100.txt +betray'd pg100.txt +betray'd. pg100.txt +betray's pg100.txt +betray, pg100.txt +betray." pg31100.txt, pg3200.txt +betrayal pg3200.txt +betrayed pg31100.txt, pg3200.txt, pg100.txt +betrayed. pg3200.txt +betrayed; pg31100.txt +betraying pg31100.txt, pg3200.txt, pg100.txt +betrays pg3200.txt +betrims, pg100.txt +betroth'd pg100.txt +betrothal. pg3200.txt +bets pg31100.txt, pg3200.txt +betsey pg31100.txt, pg3200.txt +betsey, pg31100.txt, pg3200.txt +betsey? 
pg31100.txt +betsy, pg3200.txt +bett pg3200.txt +better pg31100.txt, pg3200.txt, pg100.txt +better! pg3200.txt +better!--anybody pg3200.txt +better'n pg3200.txt +better, pg31100.txt, pg3200.txt, pg100.txt +better," pg31100.txt +better-- pg3200.txt +better--" pg3200.txt +better. pg31100.txt, pg3200.txt, pg100.txt +better." pg31100.txt, pg3200.txt +better.' pg3200.txt +better: pg3200.txt +better; pg31100.txt, pg3200.txt, pg100.txt +better? pg3200.txt, pg100.txt +bettered pg3200.txt +bettering pg3200.txt +betters pg3200.txt, pg100.txt +betters. pg3200.txt, pg100.txt +betters." pg3200.txt +betting pg3200.txt +betting? pg100.txt +betts, pg3200.txt +betty pg31100.txt +between pg31100.txt, pg3200.txt, pg100.txt +between! pg3200.txt +between, pg3200.txt, pg100.txt +between- pg100.txt +between--yes." pg3200.txt +between. pg3200.txt +between." pg3200.txt +between; pg100.txt +between? pg3200.txt +betwix pg3200.txt +betwixt pg3200.txt, pg100.txt +bevel; pg100.txt +beveled." pg3200.txt +beverage, pg100.txt +beverage. pg3200.txt +beverages pg3200.txt +bewail. pg100.txt +bewailed pg3200.txt +beware pg31100.txt, pg100.txt +beware!" pg3200.txt +beware. pg100.txt +beware: pg100.txt +bewet, pg100.txt +bewildered pg31100.txt, pg3200.txt +bewildered--and pg31100.txt +bewildering pg3200.txt +bewilderment pg3200.txt +bewitch'd pg100.txt +bewitched, pg3200.txt +bewitched. pg100.txt +bewitching pg31100.txt, pg3200.txt +bewitching. pg3200.txt +bewray pg100.txt +beyond pg31100.txt, pg3200.txt, pg100.txt +beyond, pg3200.txt, pg100.txt +beyond--" pg31100.txt +beyond--fifty pg3200.txt +beyond--westminster. pg3200.txt +beyond. pg31100.txt, pg3200.txt +beyond." pg31100.txt +bezonians: pg100.txt +bhopal." pg3200.txt +bhopal; pg3200.txt +bhowanee pg3200.txt +bhudpoor." pg3200.txt +bianca pg100.txt +bianca, pg100.txt +bianca. pg100.txt +bianca; pg100.txt +bianca? pg100.txt +bianca] pg100.txt +bias pg31100.txt, pg100.txt +bias, pg100.txt +bias-drawing, pg100.txt +bias. pg100.txt +biased, pg31100.txt +bibble-babble. pg100.txt +bible pg3200.txt +bible's--beautiful pg3200.txt +bible, pg3200.txt +bible-annex pg3200.txt +bible. pg3200.txt +bible." pg3200.txt +bible?" pg3200.txt +bible?' pg3200.txt +bibles pg3200.txt +bibles. pg3200.txt +biblical pg3200.txt +bibliography pg3200.txt +bickerings. pg100.txt +bicycle pg3200.txt +bicycle. pg3200.txt +bicycles! pg3200.txt +bid pg31100.txt, pg3200.txt, pg100.txt +bid! pg3200.txt +bid. pg3200.txt, pg100.txt +bid? pg31100.txt +bidding pg31100.txt, pg3200.txt +bidding, pg100.txt +bidding. pg100.txt +bidding; pg3200.txt +bidding? pg100.txt +bide pg31100.txt, pg3200.txt, pg100.txt +bide, pg100.txt +bide- pg100.txt +bide. pg100.txt +bide? pg100.txt +bided pg3200.txt +bides, pg100.txt +bides. pg100.txt +bides; pg100.txt +biding. pg100.txt +bids pg31100.txt, pg100.txt +bids. pg3200.txt +bien pg3200.txt, pg100.txt +biennial pg3200.txt +bier pg100.txt +bier! pg100.txt +bier. pg3200.txt +big pg3200.txt, pg100.txt +big, pg3200.txt +big- pg100.txt +big? pg100.txt +bigamy, pg3200.txt +bigamy. pg3200.txt, pg100.txt +bigelow pg3200.txt +bigger pg3200.txt, pg100.txt +bigger--" pg3200.txt +bigger----" pg3200.txt +bigger. pg100.txt +biggest pg3200.txt +bigler pg3200.txt +bigler, pg3200.txt +bigler. pg3200.txt +bigler; pg3200.txt +bigot pg100.txt +bigoted pg3200.txt +bigoted, pg3200.txt +bijou pg3200.txt +bilberry; pg100.txt +bilbo pg100.txt +bilbo. pg100.txt +bilbow. pg100.txt +biler-factry pg3200.txt +bilgewater pg3200.txt +bilgewater," pg3200.txt +bilgewater?" pg3200.txt +bilin'. 
pg3200.txt +bilious, pg31100.txt +bilk pg3200.txt +bilk! pg3200.txt +bill pg3200.txt, pg100.txt +bill! pg31100.txt, pg3200.txt +bill!" pg3200.txt +bill!' pg3200.txt +bill's pg3200.txt +bill, pg3200.txt, pg100.txt +bill--and pg31100.txt +bill-head pg3200.txt +bill. pg31100.txt, pg3200.txt, pg100.txt +bill." pg3200.txt +bill.' pg3200.txt +bill.--and pg3200.txt +bill: pg3200.txt +bill; pg3200.txt +bill?" pg3200.txt +billed pg3200.txt +billet pg3200.txt +billeted. pg100.txt +billfinger! pg3200.txt +billfinger, pg3200.txt +billiard pg3200.txt +billiard- pg3200.txt +billiard-balls pg3200.txt +billiard-room pg3200.txt +billiard-room, pg31100.txt, pg3200.txt +billiard-room. pg31100.txt +billiard-table pg3200.txt +billiard-table, pg31100.txt, pg3200.txt +billiard-table. pg3200.txt +billiard-tables, pg3200.txt +billiardist pg3200.txt +billiards pg3200.txt +billiards--and pg3200.txt +billie pg3200.txt +billings's pg3200.txt +billingsgate pg3200.txt +billingsgate--" pg3200.txt +billion. pg3200.txt +billions pg3200.txt +billows pg3200.txt +billowy pg3200.txt +bills pg3200.txt, pg100.txt +bills! pg3200.txt +bills, pg100.txt +bills--nigger pg3200.txt +bills. pg3200.txt, pg100.txt +bills? pg100.txt +bills?" pg3200.txt +billson pg3200.txt +billson!'" pg3200.txt +billson." pg3200.txt +billy pg3200.txt +billy, pg3200.txt +bin pg3200.txt +bin, pg100.txt +binary, pg31100.txt, pg3200.txt, pg100.txt +bind pg3200.txt, pg100.txt +bind, pg100.txt +bind. pg100.txt +binderies pg3200.txt +bindeth pg100.txt +binding." pg31100.txt +binds pg3200.txt +bine, pg3200.txt +bing'd pg3200.txt +bingley pg31100.txt +bingley" pg31100.txt +bingley's pg31100.txt +bingley's, pg31100.txt +bingley, pg31100.txt +bingley. pg31100.txt +bingley." pg31100.txt +bingley: pg31100.txt +bingley; pg31100.txt +bingley?" pg31100.txt +bingleys. pg31100.txt +binnum pg3200.txt +bins, pg3200.txt +biographer pg3200.txt +biographer's pg3200.txt +biographer, pg31100.txt, pg3200.txt +biographer. pg31100.txt, pg3200.txt +biographers pg3200.txt +biographies pg3200.txt +biography pg3200.txt +biography, pg3200.txt +biography--no, pg3200.txt +biography. pg3200.txt +biography.' pg3200.txt +biology, pg3200.txt +biondello pg100.txt +biondello, pg100.txt +biondello. pg100.txt +biondello? pg100.txt +biondello] pg100.txt +birch pg3200.txt +birch, pg100.txt +bird pg3200.txt, pg100.txt +bird! pg100.txt +bird's pg3200.txt +bird, pg3200.txt, pg100.txt +bird. pg3200.txt, pg100.txt +bird." pg3200.txt +bird? pg100.txt +birding-pieces. pg100.txt +birds pg31100.txt, pg3200.txt, pg100.txt +birds! pg3200.txt +birds, pg3200.txt, pg100.txt +birds--the pg3200.txt +birds. pg3200.txt +birds." pg3200.txt +birds?' pg3200.txt +birdsall pg3200.txt +birnam. pg100.txt +birth pg31100.txt, pg3200.txt, pg100.txt +birth! pg3200.txt, pg100.txt +birth, pg3200.txt, pg100.txt +birth- pg100.txt +birth-- pg31100.txt +birth--" pg3200.txt +birth--'" pg3200.txt +birth-century pg3200.txt +birth-day; pg3200.txt +birth. pg31100.txt, pg3200.txt, pg100.txt +birth." pg3200.txt +birth; pg100.txt +birth? pg3200.txt, pg100.txt +birth?" pg3200.txt +birthday pg31100.txt, pg3200.txt +birthday, pg3200.txt +birthday--" pg3200.txt +birthday--because pg3200.txt +birthday. pg3200.txt, pg100.txt +birthday." pg3200.txt +birthday: pg3200.txt +birthday; pg3200.txt +birthplace pg3200.txt +birthright pg3200.txt +birthrights. pg3200.txt +births pg3200.txt +births, pg100.txt +births. pg100.txt +biscuit pg100.txt +biscuit-crumbs.' pg3200.txt +biscuit. 
pg100.txt +biscuits pg31100.txt +bishop pg31100.txt, pg3200.txt, pg100.txt +bishop, pg3200.txt, pg100.txt +bishop. pg3200.txt +bishops pg3200.txt, pg100.txt +bishops, pg3200.txt, pg100.txt +bishops. pg100.txt +bishops; pg3200.txt +bismarck's, pg3200.txt +bismarck's. pg3200.txt +bismarck. pg3200.txt +bit pg31100.txt, pg3200.txt, pg100.txt +bit, pg3200.txt +bit--no pg3200.txt +bit. pg3200.txt, pg100.txt +bit." pg3200.txt +bitch; pg100.txt +bitche. pg3200.txt +bite pg3200.txt, pg100.txt +bite, pg3200.txt +bite--muzaffurpore pg3200.txt +bite--they pg3200.txt +bite. pg3200.txt, pg100.txt +bite; pg100.txt +bite? pg100.txt +bites pg3200.txt +bites, pg3200.txt, pg100.txt +biting pg3200.txt, pg100.txt +bitingly: pg3200.txt +bits pg3200.txt, pg100.txt +bits. pg100.txt +bitt'rest. pg100.txt +bitten, pg3200.txt +bitter pg3200.txt, pg100.txt +bitter, pg3200.txt, pg100.txt +bitter. pg100.txt +bitterest pg3200.txt +bitterest, pg3200.txt +bitterly pg31100.txt, pg3200.txt +bitterly. pg3200.txt, pg100.txt +bitterness pg31100.txt, pg3200.txt, pg100.txt +bitterness, pg3200.txt +bitterness. pg3200.txt, pg100.txt +bitterness: pg3200.txt +bitterness? pg100.txt +bixby pg3200.txt +bixby, pg3200.txt +bixby? pg3200.txt +bixby?' pg3200.txt +bizarre pg3200.txt +blab- pg100.txt +black pg31100.txt, pg3200.txt, pg100.txt +black! pg100.txt +black!" pg31100.txt +black, pg3200.txt, pg100.txt +black- pg100.txt +black--" pg3200.txt +black-bearded, pg3200.txt +black-eyed pg3200.txt +black-foresty.) pg3200.txt +black-hearted pg3200.txt +black-hearted, pg3200.txt +black-list pg3200.txt +black. pg3200.txt, pg100.txt +black." pg3200.txt +black? pg100.txt +black?" pg3200.txt +blackamoors pg100.txt +blackballed!'" pg3200.txt +blackberry pg3200.txt +blackbird. pg3200.txt +blackbird; pg3200.txt +blackboard pg3200.txt +blacked pg3200.txt +blackened pg3200.txt +blacker pg3200.txt +blacker! pg100.txt +blackest pg31100.txt, pg3200.txt +blackfeet pg3200.txt +blackfriars pg100.txt +blackfriars; pg100.txt +blackguarded pg3200.txt +blackguarding pg3200.txt +blackguards pg3200.txt +blackheath pg100.txt +blackheath; pg100.txt +blacklegs, pg3200.txt +blackmail pg3200.txt +blackness pg3200.txt +blackness. pg3200.txt, pg100.txt +blacks pg3200.txt +blacks." pg3200.txt +blacksmith pg3200.txt +blacksmith! pg3200.txt +blacksmith, pg3200.txt +blacksmith--well, pg3200.txt +blacksmith." pg3200.txt +blacksmith: pg3200.txt +blacksmith; pg3200.txt +bladder. pg3200.txt +bladders, pg100.txt +blade pg3200.txt, pg100.txt +blade, pg3200.txt, pg100.txt +blade-bone. pg3200.txt +blades, pg100.txt +blaggard!" pg3200.txt +blaine pg3200.txt +blaine," pg3200.txt +blaine. pg3200.txt +blains, pg100.txt +blaize pg31100.txt +blake pg3200.txt +blakely. pg3200.txt +blame pg31100.txt, pg3200.txt, pg100.txt +blame, pg31100.txt, pg3200.txt, pg100.txt +blame. pg3200.txt, pg100.txt +blame." pg3200.txt +blame; pg31100.txt, pg100.txt +blame? pg100.txt +blame?" pg3200.txt +blamed pg3200.txt +blamed, pg31100.txt +blameless pg3200.txt +blameless, pg31100.txt +blameless. pg31100.txt +blameless." pg3200.txt +blameless; pg31100.txt, pg100.txt +blamelessly. pg31100.txt +blamelessly." pg31100.txt +blames, pg100.txt +blaming pg3200.txt +blanc pg3200.txt +blanc, pg3200.txt +blanc--so pg3200.txt +blanc. pg3200.txt +blanca pg100.txt +blanch, pg100.txt +blanch? pg100.txt +blanch] pg100.txt +blanche?" pg3200.txt +blanched, pg3200.txt +blanchemains, pg3200.txt +bland pg3200.txt +bland?" 
pg3200.txt +blandest pg3200.txt +blandly pg3200.txt +blank pg31100.txt, pg3200.txt, pg100.txt +blank, pg100.txt +blank--"12.14." pg3200.txt +blank. pg31100.txt, pg3200.txt +blanket pg3200.txt +blanket, pg100.txt +blanket. pg3200.txt, pg100.txt +blanket; pg3200.txt +blankets pg3200.txt +blankets, pg3200.txt +blankness pg3200.txt +blanks pg31100.txt, pg3200.txt, pg100.txt +blas," pg3200.txt +blasphemies. pg3200.txt +blasphemies." pg3200.txt +blaspheming, pg3200.txt +blaspheming. pg3200.txt +blasphemous pg3200.txt +blasphemous, pg100.txt +blasphemous,--i pg3200.txt +blasphemy, pg3200.txt, pg100.txt +blasphemy. pg3200.txt, pg100.txt +blasphemy." pg3200.txt +blast pg31100.txt, pg3200.txt +blast, pg100.txt +blast. pg3200.txt, pg100.txt +blast." pg3200.txt +blast; pg31100.txt +blasted pg3200.txt +blasted. pg100.txt +blasts pg3200.txt +blasts. pg100.txt +blasts; pg3200.txt +blatherskite pg3200.txt +blatherskite!" pg3200.txt +blatherskites, pg3200.txt +blaze pg3200.txt, pg100.txt +blaze." pg3200.txt +blazed pg3200.txt +blazed. pg3200.txt +blazes pg3200.txt +blazes, pg3200.txt +blazing pg3200.txt, pg100.txt +blazon pg100.txt +blazon'st pg100.txt +bleached-out, pg3200.txt +bleaching. pg100.txt +bleak pg3200.txt +blear-eyed pg3200.txt +bleat. pg100.txt +bleats. pg100.txt +bled, pg100.txt +bled.'" pg3200.txt +bleed pg3200.txt +bleed'st; pg100.txt +bleed, pg100.txt +bleed. pg3200.txt, pg100.txt +bleed; pg100.txt +bleed? pg100.txt +bleeding pg3200.txt +bleeding, pg100.txt +bleeds pg100.txt +bleeds! pg100.txt +bleiben. pg3200.txt +bleiben? pg3200.txt +blemish pg3200.txt +blemish, pg3200.txt, pg100.txt +blemish--was pg3200.txt +blemishes, pg100.txt +blemishes--and pg3200.txt +blemishless pg3200.txt +blench pg3200.txt, pg100.txt +blench, pg3200.txt, pg100.txt +blench? pg100.txt +blenched. pg3200.txt +blend pg3200.txt, pg100.txt +blend, pg3200.txt +blend--heroism, pg3200.txt +blended pg3200.txt +blending. pg3200.txt +blendings pg3200.txt +blent pg3200.txt +bless pg31100.txt, pg3200.txt, pg100.txt +bless! pg100.txt +bless'd pg100.txt +bless'd; pg3200.txt +bless, pg100.txt +bless--" pg3200.txt +bless. pg3200.txt +blessed pg31100.txt, pg3200.txt, pg100.txt +blessed. pg3200.txt +blessed." pg3200.txt +blessedly pg100.txt +blessedness. pg3200.txt, pg100.txt +blesses pg3200.txt +blesses, pg100.txt +blessin'?" pg3200.txt +blessing pg31100.txt, pg3200.txt, pg100.txt +blessing, pg31100.txt, pg3200.txt, pg100.txt +blessing--not pg3200.txt +blessing. pg31100.txt, pg3200.txt, pg100.txt +blessing." pg31100.txt, pg3200.txt +blessing.' pg100.txt +blessing; pg31100.txt, pg3200.txt, pg100.txt +blessings pg31100.txt, pg3200.txt, pg100.txt +blessings, pg3200.txt, pg100.txt +blessings. pg3200.txt, pg100.txt +blessings; pg100.txt +blest pg3200.txt, pg100.txt +blest! pg100.txt +blest!-- pg3200.txt +blest, pg100.txt +blest. pg100.txt +blest." pg3200.txt +blest.' pg3200.txt +blest: pg100.txt +blest; pg3200.txt, pg100.txt +blethering pg3200.txt +blew pg3200.txt +blew, pg3200.txt +blighted pg3200.txt +blighted; pg3200.txt +blighting pg3200.txt +blind pg31100.txt, pg3200.txt, pg100.txt +blind!" pg3200.txt +blind, pg3200.txt, pg100.txt +blind. pg31100.txt, pg3200.txt, pg100.txt +blind; pg3200.txt, pg100.txt +blind? pg100.txt +blinded pg31100.txt +blinded, pg31100.txt +blinder pg3200.txt +blinder. pg31100.txt +blindfolded pg3200.txt +blinding pg3200.txt +blindness pg31100.txt, pg3200.txt +blindness, pg100.txt +blindness. pg31100.txt +blindness." pg31100.txt +blindness; pg100.txt +blinds pg100.txt +bline." 
pg3200.txt +blink, pg3200.txt +bliss pg3200.txt, pg100.txt +bliss! pg100.txt +bliss's pg3200.txt +bliss, pg100.txt +bliss. pg3200.txt, pg100.txt +bliss; pg3200.txt +bliss?" pg3200.txt +blissful pg3200.txt +blissfully pg3200.txt +blister pg31100.txt, pg3200.txt +blister, pg100.txt +blister-plasters. pg3200.txt +blister. pg3200.txt +blistered pg3200.txt +blistered, pg3200.txt +blistering pg3200.txt +blistering! pg3200.txt +blisters pg3200.txt +blivens, pg3200.txt +block pg3200.txt, pg100.txt +block!" pg3200.txt +block, pg3200.txt +block-head; pg100.txt +block. pg3200.txt, pg100.txt +blockade pg3200.txt +blockaded pg3200.txt +blocked pg3200.txt +blocked. pg3200.txt +blockhead pg31100.txt +blockheaded pg3200.txt +blockheaded. pg3200.txt +blocks pg3200.txt +blocks, pg3200.txt +blodgett, pg3200.txt +blois--the pg3200.txt +blois; pg3200.txt +bloke pg3200.txt +blond pg3200.txt +blond; pg3200.txt +blonde pg3200.txt +blonde's pg3200.txt +blondin pg3200.txt +blood pg31100.txt, pg3200.txt, pg100.txt +blood! pg3200.txt, pg100.txt +blood!" pg3200.txt +blood" pg3200.txt +blood, pg31100.txt, pg3200.txt, pg100.txt +blood- pg3200.txt, pg100.txt +blood--battle pg3200.txt +blood--hey, pg3200.txt +blood--insomuch pg3200.txt +blood--saw pg3200.txt +blood-curdling pg3200.txt +blood-heat, pg3200.txt +blood-heat. pg3200.txt +blood-kin pg3200.txt +blood-mark pg3200.txt +blood-relations, pg3200.txt +blood-relative pg3200.txt +blood-soaked pg3200.txt +blood-stirring pg3200.txt +blood-suckers. pg100.txt +blood-thirsty pg3200.txt +blood. pg31100.txt, pg3200.txt, pg100.txt +blood." pg3200.txt +blood: pg100.txt +blood; pg3200.txt, pg100.txt +blood? pg100.txt +blood?" pg3200.txt +blooded pg3200.txt +bloodhound!" pg3200.txt +bloodhound. pg3200.txt, pg100.txt +bloodhounds pg3200.txt +bloodless, pg100.txt +bloods pg100.txt +bloods! pg100.txt +bloods, pg100.txt +bloods. pg100.txt +bloodshed pg3200.txt +bloodshed, pg100.txt +bloodshed--" pg3200.txt +bloodshed." pg3200.txt +bloodshed? pg3200.txt +bloodshedding, pg100.txt +bloodthirsty." pg3200.txt +bloody pg3200.txt, pg100.txt +bloody, pg100.txt +bloody-minded; pg100.txt +bloody-scepter'd, pg100.txt +bloody. pg3200.txt, pg100.txt +bloom pg3200.txt +bloom! pg3200.txt +bloom, pg31100.txt, pg3200.txt +bloom--how pg3200.txt +bloom. pg3200.txt +blooming pg31100.txt, pg3200.txt +blooming, pg31100.txt +bloomy pg3200.txt +blossom pg3200.txt +blossom, pg3200.txt +blossoming pg100.txt +blossoms pg3200.txt, pg100.txt +blossoms. pg31100.txt, pg3200.txt +blot pg3200.txt, pg100.txt +blot, pg100.txt +blot. pg100.txt +blot; pg100.txt +blot? pg100.txt +blots pg3200.txt +blotted pg3200.txt +blotted; pg100.txt +blount pg100.txt +blouses, pg3200.txt +blow pg31100.txt, pg3200.txt, pg100.txt +blow! pg100.txt +blow!" pg3200.txt +blow, pg31100.txt, pg3200.txt, pg100.txt +blow-out, pg3200.txt +blow-out; pg3200.txt +blow-pipe, pg3200.txt +blow-pipe--the pg3200.txt +blow. pg31100.txt, pg3200.txt, pg100.txt +blow." pg3200.txt +blow: pg3200.txt +blow; pg3200.txt, pg100.txt +blowers-up! pg100.txt +blowin' pg3200.txt +blowing pg3200.txt, pg100.txt +blowing, pg3200.txt +blowing. pg3200.txt +blown pg3200.txt, pg100.txt +blown. pg100.txt +blown; pg100.txt +blows pg3200.txt, pg100.txt +blows! pg100.txt +blows!' pg3200.txt +blows, pg100.txt +blows. pg3200.txt, pg100.txt +blows; pg100.txt +blowsy!" pg31100.txt +blowy, pg3200.txt +blubber-and-slush." pg3200.txt +blubber. pg3200.txt +blubbering. pg3200.txt, pg100.txt +blucher pg3200.txt +blucher. 
pg3200.txt +blucherberg, pg3200.txt +bludgeon pg3200.txt +blue pg31100.txt, pg3200.txt, pg100.txt +blue! pg3200.txt +blue!' pg3200.txt +blue, pg31100.txt, pg3200.txt, pg100.txt +blue-caps, pg3200.txt +blue. pg3200.txt, pg100.txt +blue: pg3200.txt +blue; pg3200.txt +blue? pg100.txt +bluebells--a pg3200.txt +bluejay pg3200.txt +bluejay. pg3200.txt +bluejays pg3200.txt +bluejays." pg3200.txt +bluer, pg3200.txt +blues pg3200.txt +blues." pg3200.txt +bluest pg3200.txt +bluff pg3200.txt +bluff, pg3200.txt +bluffer. pg3200.txt +bluffs pg3200.txt +bluffs, pg3200.txt +bluffs,' pg3200.txt +bluffs. pg3200.txt +blumis's pg3200.txt +blunder pg31100.txt, pg3200.txt +blunder, pg31100.txt, pg3200.txt +blunder--lost pg3200.txt +blunder. pg3200.txt +blunder; pg3200.txt +blundered pg3200.txt +blundered, pg31100.txt, pg3200.txt +blunders!" pg31100.txt +blunders, pg31100.txt, pg3200.txt +blunders; pg31100.txt +blunt pg100.txt +blunt! pg3200.txt +blunt's pg3200.txt +blunt, pg100.txt +blunt. pg3200.txt, pg100.txt +blunt: pg100.txt +blunt; pg100.txt +bluntly. pg100.txt +bluntly? pg100.txt +blunts pg100.txt +blur pg3200.txt +blurred pg3200.txt +blurted pg3200.txt +blush pg31100.txt, pg3200.txt, pg100.txt +blush! pg100.txt +blush'd pg100.txt +blush, pg3200.txt, pg100.txt +blush. pg31100.txt, pg3200.txt, pg100.txt +blush." pg3200.txt +blush; pg31100.txt +blush? pg3200.txt, pg100.txt +blushed pg31100.txt, pg3200.txt +blushed, pg3200.txt +blushed--"yet pg3200.txt +blushed." pg3200.txt +blushes pg3200.txt, pg100.txt +blushes, pg3200.txt, pg100.txt +blushes. pg3200.txt, pg100.txt +blushing pg31100.txt, pg3200.txt, pg100.txt +blushing, pg31100.txt, pg3200.txt +blushing." pg3200.txt +bluster." pg3200.txt +blusterer pg3200.txt +bo'd'n-house, pg3200.txt +bo-peep pg100.txt +boar pg100.txt +boar, pg100.txt +board pg31100.txt, pg3200.txt, pg100.txt +board!" pg3200.txt +board's pg3200.txt +board, pg31100.txt, pg3200.txt, pg100.txt +board- pg3200.txt +board--nearing pg3200.txt +board--that pg3200.txt +board-clatter. pg3200.txt +board-fence, pg3200.txt +board. pg3200.txt, pg100.txt +board." pg31100.txt, pg3200.txt +board; pg100.txt +boarded pg3200.txt +boarder pg3200.txt +boarder, pg3200.txt +boarders pg3200.txt +boarders, pg3200.txt +boarders--the pg3200.txt +boarders. pg3200.txt +boarding-house, pg3200.txt +boarding-house. pg3200.txt +boarding-houses pg3200.txt +boards pg3200.txt +boards, pg3200.txt +boards. pg3200.txt +boards." pg3200.txt +boarely pg3200.txt +boars' pg3200.txt +boast pg31100.txt, pg3200.txt, pg100.txt +boast! pg100.txt +boast, pg31100.txt, pg3200.txt, pg100.txt +boast. pg3200.txt, pg100.txt +boast." pg31100.txt +boast; pg31100.txt, pg100.txt +boast? pg100.txt +boasted pg31100.txt +boasted, pg3200.txt +boasting pg31100.txt, pg3200.txt +boasting, pg3200.txt +boasting. pg100.txt +boasts pg3200.txt +boat pg3200.txt, pg100.txt +boat! pg3200.txt, pg100.txt +boat's pg3200.txt +boat) pg3200.txt +boat, pg3200.txt, pg100.txt +boat-management pg3200.txt +boat. pg3200.txt, pg100.txt +boat." pg3200.txt +boat.' pg3200.txt +boat; pg3200.txt +boat?" pg3200.txt +boating pg3200.txt +boatman." pg3200.txt +boatmen pg3200.txt +boatmen, pg3200.txt +boats pg3200.txt, pg100.txt +boats, pg3200.txt +boats. pg3200.txt +boats?' pg3200.txt +boatswain pg100.txt +boatswain! pg3200.txt, pg100.txt +bob pg3200.txt +bob, pg3200.txt, pg100.txt +bob- pg3200.txt +bobbing pg3200.txt +bobtail pg3200.txt +bobtailed pg3200.txt +boccaccio pg3200.txt +bode pg100.txt +bode? pg100.txt +boded pg31100.txt +bodements. pg100.txt +bodes pg100.txt +bodes. 
pg100.txt +bodied pg3200.txt +bodies pg31100.txt, pg3200.txt, pg100.txt +bodies! pg100.txt +bodies), pg3200.txt +bodies, pg3200.txt, pg100.txt +bodies- pg100.txt +bodies. pg3200.txt, pg100.txt +bodies." pg3200.txt +bodies; pg3200.txt +bodies? pg100.txt +bodies?" pg3200.txt +bodily pg100.txt +boding pg3200.txt +bodings. pg3200.txt +bodkin. pg100.txt +body pg31100.txt, pg3200.txt, pg100.txt +body! pg31100.txt, pg100.txt +body!"--then, pg3200.txt +body" pg3200.txt +body's pg31100.txt, pg3200.txt, pg100.txt +body, pg31100.txt, pg3200.txt, pg100.txt +body- pg100.txt +body--" pg3200.txt +body--as pg3200.txt +body--for pg3200.txt +body--therefore pg3200.txt +body-curer. pg100.txt +body-guard. pg3200.txt +body-servant pg3200.txt +body-snatcher."] pg3200.txt +body-snatchers, pg3200.txt +body. pg31100.txt, pg3200.txt, pg100.txt +body." pg31100.txt, pg3200.txt +body.--what pg31100.txt +body; pg3200.txt, pg100.txt +body? pg100.txt +body?" pg3200.txt +body] pg100.txt +body]. pg100.txt +bodyguard, pg3200.txt +boeotain pg3200.txt +boeotian pg3200.txt +boer pg3200.txt +boer--now pg3200.txt +boer. pg3200.txt +boer: pg3200.txt +boers pg3200.txt +boers: pg3200.txt +boers; pg3200.txt +bofe pg3200.txt +boggle." pg3200.txt +boggs pg3200.txt +boggs, pg3200.txt +bogs. pg100.txt +bogus pg3200.txt +bohemia pg100.txt +bohemia, pg100.txt +bohemia. pg100.txt +bohemia; pg100.txt +bohemia? pg100.txt +bohemian pg3200.txt +bohun. pg100.txt +boil pg3200.txt +boil, pg100.txt +boil. pg3200.txt +boiled pg31100.txt, pg3200.txt +boiled, pg3200.txt +boiled. pg3200.txt +boiler-deck pg3200.txt +boiler-iron pg3200.txt +boiler-iron. pg3200.txt +boilers! pg3200.txt +boilers. pg3200.txt +boiling pg3200.txt, pg100.txt +boiling, pg3200.txt +boiling." pg3200.txt +boiling; pg3200.txt +boinville, pg3200.txt +boinville: pg3200.txt +boisterous pg3200.txt, pg100.txt +boisterous. pg3200.txt +boisterous." pg3200.txt +boisterously pg3200.txt +bok, pg3200.txt +bold pg31100.txt, pg3200.txt, pg100.txt +bold'ned pg100.txt +bold, pg3200.txt, pg100.txt +bold. pg3200.txt, pg100.txt +bold." pg3200.txt +bold; pg100.txt +bold? pg100.txt +bolder pg31100.txt +bolder. pg100.txt +bolder; pg100.txt +boldly pg31100.txt, pg3200.txt +boldly, pg100.txt +boldly. pg100.txt +boldly." pg3200.txt +boldness pg3200.txt +boldness. pg100.txt +bolingbroke pg100.txt +bolingbroke!' pg100.txt +bolingbroke's. pg100.txt +bolingbroke, pg100.txt +bolingbroke- pg100.txt +bolingbroke. pg100.txt +bolingbroke; pg100.txt +bolingbroke? pg100.txt +bolingbroke?' pg100.txt +bologna. pg3200.txt +bolster pg100.txt +bolster, pg100.txt +bolt pg100.txt +bolt, pg3200.txt, pg100.txt +bolt-head pg3200.txt +bolted pg100.txt +bolting. pg100.txt +bolton pg3200.txt +bolton' pg3200.txt +bolton's pg3200.txt +bolton, pg3200.txt +bolton," pg3200.txt +bolton. pg3200.txt +bolton?" pg3200.txt +bolts pg3200.txt, pg100.txt +bolts, pg3200.txt, pg100.txt +bolwoggoly, pg3200.txt +bomb- pg3200.txt +bombards, pg100.txt +bombastic pg3200.txt +bombay pg3200.txt +bombay, pg3200.txt +bombay-aden,.................1,662 pg3200.txt +bombay. pg3200.txt +bombay; pg3200.txt +bombola pg3200.txt +bombshell pg3200.txt +bombshell. pg3200.txt +bon'd pg100.txt +bon? pg100.txt +bona, pg100.txt +bona. pg100.txt +bonanza pg3200.txt +bonanza, pg3200.txt +bond pg31100.txt, pg3200.txt, pg100.txt +bond, pg3200.txt, pg100.txt +bond--but pg3200.txt +bond-slave? pg100.txt +bond-street." pg31100.txt +bond. pg3200.txt, pg100.txt +bond; pg100.txt +bond? pg100.txt +bondage pg3200.txt, pg100.txt +bondage. pg3200.txt, pg100.txt +bondage?" 
pg3200.txt +bondage?' pg100.txt +bondman; pg100.txt +bonds pg3200.txt, pg100.txt +bonds, pg3200.txt, pg100.txt +bonds. pg3200.txt +bonds; pg100.txt +bone pg3200.txt, pg100.txt +bone, pg3200.txt, pg100.txt +bone-filter pg3200.txt +bone. pg3200.txt, pg100.txt +bone; pg3200.txt +bones pg3200.txt, pg100.txt +bones! pg100.txt +bones, pg3200.txt, pg100.txt +bones. pg3200.txt, pg100.txt +bones; pg3200.txt, pg100.txt +bones? pg100.txt +bonfire pg3200.txt +bonfires pg100.txt +bong-a-bong; pg3200.txt +bonheur. pg3200.txt +bonjour. pg100.txt +bonne pg100.txt +bonne? pg3200.txt +bonnet pg31100.txt, pg100.txt +bonnet, pg31100.txt +bonnet-shop." pg3200.txt +bonnet. pg31100.txt +bonnet." pg3200.txt +bonnetless pg3200.txt +bonnivard pg3200.txt +bonny pg3200.txt +bonny, pg100.txt +bonosus, pg3200.txt +bonwick pg3200.txt +bony pg3200.txt +bony, pg3200.txt +boobies, pg3200.txt +booby pg3200.txt +booby-hatch, pg3200.txt +booby." pg3200.txt +book pg31100.txt, pg3200.txt, pg100.txt +book! pg31100.txt, pg100.txt +book" pg3200.txt +book"--thomas pg3200.txt +book"; pg3200.txt +book' pg3200.txt +book'--the pg3200.txt +book's pg3200.txt +book's; pg3200.txt +book, pg31100.txt, pg3200.txt, pg100.txt +book,' pg3200.txt +book- pg3200.txt, pg100.txt +book--[professor pg3200.txt +book--it pg3200.txt +book-mates. pg100.txt +book-rest pg3200.txt +book-room, pg31100.txt +book-trade--that pg3200.txt +book. pg31100.txt, pg3200.txt, pg100.txt +book." pg31100.txt, pg3200.txt +book.' pg3200.txt +book: pg3200.txt, pg100.txt +book; pg3200.txt, pg100.txt +book? pg3200.txt, pg100.txt +book?" pg3200.txt +book?' pg3200.txt +book] pg100.txt +bookcase pg31100.txt +bookcase, pg31100.txt, pg3200.txt +booked pg3200.txt +bookish pg3200.txt +booklet. pg3200.txt +books pg31100.txt, pg3200.txt, pg100.txt +books! pg100.txt +books, pg31100.txt, pg3200.txt, pg100.txt +books,--the pg3200.txt +books--of pg31100.txt +books. pg3200.txt, pg100.txt +books." pg31100.txt, pg3200.txt +books: pg3200.txt +books; pg31100.txt, pg3200.txt, pg100.txt +books?" pg3200.txt +bookseller pg31100.txt +bookseller's pg3200.txt +booksellers pg3200.txt +booksellers, pg31100.txt +bookstore pg3200.txt +booleroo pg3200.txt +boom pg3200.txt +boom!! pg3200.txt +boom, pg3200.txt +boom. pg3200.txt +boomerang pg3200.txt +boomerang. pg3200.txt +booming pg3200.txt +booming. pg3200.txt +booming." pg3200.txt +boon pg3200.txt, pg100.txt +boon, pg100.txt +boon. pg100.txt +boon; pg100.txt +boons." pg3200.txt +boor. pg3200.txt +boorish pg3200.txt +boot pg3200.txt, pg100.txt +boot! pg100.txt +boot, pg31100.txt, pg100.txt +boot-hose pg100.txt +boot-jack. pg3200.txt +boot-jacks pg3200.txt +boot-polishing, pg3200.txt +boot-sole pg3200.txt +boot. pg100.txt +boot; pg3200.txt, pg100.txt +bootblack, pg3200.txt +booted pg3200.txt +booted. pg100.txt +booth pg3200.txt +booths pg3200.txt +bootless pg100.txt +bootless. pg100.txt +boots pg3200.txt, pg100.txt +boots!--see pg3200.txt +boots, pg3200.txt +boots----" pg3200.txt +boots. pg3200.txt, pg100.txt +booty! pg3200.txt +booty, pg100.txt +booty. pg3200.txt +booty; pg100.txt +bopple. pg3200.txt +bor----" pg3200.txt +borachio. pg100.txt +borachio? pg100.txt +bordeaux pg100.txt +bordeaux. pg100.txt +border pg3200.txt +border, pg3200.txt +border. pg3200.txt +bordered pg31100.txt, pg3200.txt +borderers. pg100.txt +bordering pg3200.txt +borders pg31100.txt, pg3200.txt +borders, pg3200.txt +borders. pg3200.txt +bore pg31100.txt, pg3200.txt, pg100.txt +bore! pg3200.txt +bore, pg3200.txt, pg100.txt +bore--dug pg3200.txt +bore. 
pg100.txt +bore: pg100.txt +bore] pg3200.txt +boreas's pg3200.txt +bored pg3200.txt +bored, pg3200.txt +bored. pg3200.txt +borghese. pg3200.txt +borgia! pg3200.txt +boring pg3200.txt +borings pg3200.txt +born pg31100.txt, pg3200.txt, pg100.txt +born! pg100.txt +born!" pg3200.txt +born, pg3200.txt, pg100.txt +born- pg100.txt +born--what pg3200.txt +born. pg3200.txt, pg100.txt +born." pg31100.txt, pg3200.txt +born; pg31100.txt, pg3200.txt, pg100.txt +born? pg3200.txt, pg100.txt +born?" pg3200.txt +born?' pg3200.txt +borne pg31100.txt, pg3200.txt, pg100.txt +borne! pg100.txt +borne, pg100.txt +borne. pg31100.txt, pg100.txt +borne." pg31100.txt, pg3200.txt +borne; pg100.txt +borrow pg31100.txt, pg3200.txt, pg100.txt +borrow'd, pg100.txt +borrow'd. pg100.txt +borrow, pg3200.txt, pg100.txt +borrow. pg100.txt +borrow." pg3200.txt +borrowed pg3200.txt, pg100.txt +borrowed, pg3200.txt +borrowed. pg3200.txt +borrowing, pg3200.txt, pg100.txt +borrowing; pg3200.txt +bosh pg3200.txt +bosh!" pg3200.txt +boskos. pg100.txt +bosom pg31100.txt, pg3200.txt, pg100.txt +bosom! pg100.txt +bosom, pg3200.txt, pg100.txt +bosom. pg3200.txt, pg100.txt +bosom." pg3200.txt +bosom.- pg100.txt +bosom; pg100.txt +bosom? pg100.txt +bosoms pg31100.txt, pg100.txt +bosoms! pg3200.txt +bosoms, pg100.txt +bosoms. pg100.txt +bosoms? pg100.txt +boson? pg100.txt +bosphorus, pg3200.txt +bosphorus. pg3200.txt +bosporus pg3200.txt +boss pg3200.txt +boss!" pg3200.txt +boss!'] pg3200.txt +boss, pg3200.txt +boss-ship pg3200.txt +boss. pg3200.txt +boss." pg3200.txt +boss?" pg3200.txt +bosses." pg3200.txt +bossing. pg3200.txt +bossons. pg3200.txt +boston pg3200.txt +boston, pg3200.txt +boston--but pg3200.txt +boston--died pg3200.txt +boston--rightly, pg3200.txt +boston--this pg3200.txt +boston. pg3200.txt +boston." pg3200.txt +boston.' pg3200.txt +boston: pg3200.txt +bosworth pg100.txt +botany, pg3200.txt +botch pg3200.txt +botch'd. pg100.txt +botching pg3200.txt +both pg31100.txt, pg3200.txt, pg100.txt +both! pg3200.txt, pg100.txt +both!" pg3200.txt +both!--i pg31100.txt +both"; pg3200.txt +both, pg31100.txt, pg3200.txt, pg100.txt +both--" pg31100.txt, pg3200.txt +both--that pg3200.txt +both--then pg3200.txt +both--why?" pg3200.txt +both. pg31100.txt, pg3200.txt, pg100.txt +both." pg31100.txt, pg3200.txt +both.' pg3200.txt +both: pg31100.txt +both; pg31100.txt, pg100.txt +both? pg3200.txt, pg100.txt +bother pg3200.txt +bother, pg3200.txt +bothered pg3200.txt +bothering pg3200.txt +bothering, pg3200.txt +bothers? pg3200.txt +bothersome pg3200.txt +bots. pg3200.txt +botte pg3200.txt +botticelli pg3200.txt +botticelli's pg3200.txt +bottle pg31100.txt, pg3200.txt, pg100.txt +bottle, pg100.txt +bottle- pg100.txt +bottle. pg3200.txt, pg100.txt +bottle." pg3200.txt +bottle; pg100.txt +bottled pg3200.txt +bottlehorn, pg3200.txt +bottles pg3200.txt +bottles, pg3200.txt +bottles; pg3200.txt +bottom pg31100.txt, pg3200.txt, pg100.txt +bottom!" pg3200.txt +bottom, pg3200.txt +bottom-- pg3200.txt +bottom. pg3200.txt, pg100.txt +bottom." pg3200.txt +bottom; pg100.txt +bottom? pg3200.txt, pg100.txt +bottomed pg3200.txt +bottomless pg3200.txt +bottoms pg31100.txt, pg3200.txt +boucher pg3200.txt +bouciqualt; pg100.txt +bouge? pg100.txt +bough pg3200.txt +bough, pg3200.txt, pg100.txt +bough. pg100.txt +boughs, pg100.txt +boughs. pg3200.txt, pg100.txt +bought pg31100.txt, pg3200.txt, pg100.txt +bought, pg31100.txt, pg3200.txt +bought. pg3200.txt, pg100.txt +bought." pg3200.txt +bought; pg3200.txt, pg100.txt +bought?" 
pg3200.txt +boulder pg3200.txt +boulder, pg3200.txt +boulders pg3200.txt +boulders, pg3200.txt +boulders?" pg3200.txt +boulders?' pg3200.txt +boulevard pg3200.txt +boulevard, pg3200.txt +boulevards, pg3200.txt +boun' pg3200.txt +bounce pg3200.txt +bounce, pg3200.txt +bounce. pg3200.txt +bounce; pg100.txt +bounced?" pg3200.txt +bounced?' pg3200.txt +bound pg31100.txt, pg3200.txt, pg100.txt +bound! pg100.txt +bound, pg3200.txt, pg100.txt +bound- pg100.txt +bound. pg3200.txt, pg100.txt +bound; pg100.txt +bound? pg100.txt +bound?" pg3200.txt +boundaries. pg3200.txt +bounded pg3200.txt +bounding pg3200.txt +boundless pg3200.txt +boundless! pg3200.txt +boundless. pg3200.txt +bounds pg3200.txt, pg100.txt +bounds, pg100.txt +bounds. pg3200.txt, pg100.txt +bounds; pg3200.txt +bounds? pg100.txt +bounteous pg100.txt +bounteous, pg100.txt +bounteous. pg100.txt +bounteously, pg100.txt +bounties pg100.txt +bountiful pg100.txt +bountiful. pg31100.txt +bountifully pg3200.txt +bounty pg31100.txt, pg3200.txt, pg100.txt +bounty! pg100.txt +bounty, pg100.txt +bounty? pg3200.txt +bouquet; pg3200.txt +bouquets pg3200.txt +bourbon pg3200.txt +bourbon, pg3200.txt +bourgeois; pg3200.txt +bourget pg3200.txt +bourget, pg3200.txt +bourget. pg3200.txt +bourget? pg3200.txt +bourgh, pg31100.txt +bourgh," pg31100.txt +bourgh. pg31100.txt +bourlemont pg3200.txt +bourlemont! pg3200.txt +bourlemont, pg3200.txt +bourlemont? pg3200.txt +bourn pg100.txt +bourn. pg100.txt +bourne pg3200.txt +bout--he pg3200.txt +boutonniere?" pg3200.txt +bouts pg3200.txt +bow pg31100.txt, pg3200.txt, pg100.txt +bow!" pg3200.txt +bow'd- pg100.txt +bow'r, pg100.txt +bow'rs. pg100.txt +bow, pg31100.txt, pg3200.txt, pg100.txt +bow-wow.] pg100.txt +bow. pg3200.txt, pg100.txt +bow." pg31100.txt +bow: pg3200.txt +bow; pg3200.txt +bow? pg100.txt +bow?" pg3200.txt +bowed pg3200.txt +bowed, pg3200.txt, pg100.txt +bowed. pg31100.txt, pg3200.txt, pg100.txt +bowels pg3200.txt +bowels, pg100.txt +bowels. pg3200.txt +bowels; pg100.txt +bowels?" pg3200.txt +bowen pg3200.txt +bower pg3200.txt +bower, pg100.txt +bower. pg100.txt +bower." pg3200.txt +bowers pg3200.txt +bowers's; pg3200.txt +bowery pg3200.txt +bowie pg3200.txt +bowie-knife. pg3200.txt +bowing pg3200.txt +bowing. pg3200.txt +bowing; pg100.txt +bowings pg3200.txt +bowl pg3200.txt, pg100.txt +bowl, pg100.txt +bowl. pg3200.txt, pg100.txt +bowl." pg3200.txt +bowlder pg3200.txt +bowled pg3200.txt +bowler; pg100.txt +bowling pg3200.txt +bowling, pg3200.txt +bowling-alleys pg3200.txt +bowls pg3200.txt +bowls. pg100.txt +bowral pg3200.txt +bows pg31100.txt, pg3200.txt, pg100.txt +bows! pg3200.txt +bows, pg3200.txt, pg100.txt +bows. pg3200.txt +bows: pg3200.txt +bows; pg3200.txt +bowsprit pg3200.txt +bowstring, pg100.txt +box pg31100.txt, pg3200.txt, pg100.txt +box, pg31100.txt, pg3200.txt +box-office pg3200.txt +box. pg3200.txt, pg100.txt +box." pg31100.txt, pg3200.txt +box; pg3200.txt +box? pg100.txt +box?" pg3200.txt +box?' pg3200.txt +box] pg100.txt +boxed.' pg3200.txt +boxed; pg3200.txt +boxer pg3200.txt +boxes pg3200.txt +boxes, pg3200.txt, pg100.txt +boxing, pg3200.txt +boy pg31100.txt, pg3200.txt, pg100.txt +boy! pg3200.txt, pg100.txt +boy!" pg3200.txt +boy!' pg3200.txt +boy'd pg3200.txt +boy's pg3200.txt +boy, pg3200.txt, pg100.txt +boy," pg3200.txt +boy- pg100.txt +boy--" pg31100.txt, pg3200.txt +boy--i pg3200.txt +boy-life pg3200.txt +boy-twin pg3200.txt +boy. pg31100.txt, pg3200.txt, pg100.txt +boy." pg3200.txt +boy.' pg3200.txt, pg100.txt +boy.- pg100.txt +boy.] 
pg100.txt +boy: pg100.txt +boy; pg3200.txt, pg100.txt +boy? pg100.txt +boy?" pg31100.txt, pg3200.txt +boycott pg3200.txt +boycotted pg3200.txt +boyet pg100.txt +boyet, pg100.txt +boyet. pg100.txt +boyet.' pg100.txt +boyet? pg100.txt +boyhood pg3200.txt +boyhood, pg3200.txt +boyhood--columbus pg3200.txt +boyhood--it's pg3200.txt +boyhood. pg3200.txt +boys pg31100.txt, pg3200.txt, pg100.txt +boys! pg3200.txt, pg100.txt +boys!" pg3200.txt +boys', pg100.txt +boys, pg3200.txt, pg100.txt +boys," pg31100.txt +boys--" pg3200.txt +boys--good pg3200.txt +boys--twins. pg3200.txt +boys--we've pg3200.txt +boys. pg31100.txt, pg3200.txt, pg100.txt +boys." pg3200.txt +boys; pg3200.txt, pg100.txt +boys?" pg3200.txt +brabant, pg100.txt +brabantio pg100.txt +brabantio, pg100.txt +brabbler. pg100.txt +brac'd pg100.txt +brace pg31100.txt, pg3200.txt, pg100.txt +brace, pg3200.txt, pg100.txt +brace. pg100.txt +bracelet, pg100.txt +bracelet- pg100.txt +bracelet] pg100.txt +bracelets pg31100.txt +brach. pg100.txt +bracing pg3200.txt +brackish pg3200.txt +bracknell pg3200.txt +bracknell. pg3200.txt +bracknell." pg3200.txt +braddock pg3200.txt +bradish? pg3200.txt +brady's pg3200.txt +brady. pg3200.txt +brady?" pg3200.txt +brag pg3200.txt, pg100.txt +brag. pg100.txt +bragg'd pg100.txt +braggards pg100.txt +braggart pg3200.txt +braggart, pg100.txt +bragge; pg31100.txt +bragged pg3200.txt +bragges, pg31100.txt +bragging pg3200.txt, pg100.txt +brags pg100.txt +braham pg3200.txt +braham. pg3200.txt +braham." pg3200.txt +brahma, pg3200.txt +brahmin pg3200.txt +brahmin, pg3200.txt +brahmin--coolin pg3200.txt +brahmins pg3200.txt +braid, pg100.txt +braids pg3200.txt +brain pg31100.txt, pg3200.txt, pg100.txt +brain! pg3200.txt +brain, pg3200.txt, pg100.txt +brain--clings pg3200.txt +brain. pg3200.txt, pg100.txt +brain." pg3200.txt +brain: pg3200.txt +brain; pg3200.txt, pg100.txt +brain? pg100.txt +brained pg3200.txt +brainford, pg100.txt +brainford. pg100.txt +brainford; pg100.txt +brainford? pg100.txt +brains pg31100.txt, pg3200.txt, pg100.txt +brains! pg100.txt +brains, pg3200.txt, pg100.txt +brains. pg3200.txt, pg100.txt +brains; pg3200.txt +brains? pg100.txt +brainsick pg100.txt +brake pg3200.txt, pg100.txt +brake--" pg3200.txt +brake; pg100.txt +brakeman pg3200.txt +brakemen. pg3200.txt +brakenbury pg100.txt +brakenbury, pg100.txt +brakes, pg100.txt +bramble-infested pg3200.txt +bran pg3200.txt +bran! pg100.txt +bran-mash pg3200.txt +bran; pg3200.txt +branch pg31100.txt, pg3200.txt, pg100.txt +branch, pg31100.txt, pg3200.txt +branch--" pg3200.txt +branch-road pg3200.txt +branch. pg3200.txt +branch?" pg3200.txt +branch?--well, pg3200.txt +branched pg3200.txt +branches pg3200.txt, pg100.txt +branches, pg3200.txt, pg100.txt +branches. pg3200.txt +branches.' pg3200.txt +branching pg3200.txt +brand pg3200.txt, pg100.txt +brand, pg100.txt +brand. pg3200.txt +branded pg3200.txt, pg100.txt +branded--[without pg3200.txt +brandenburgh pg3200.txt +brander pg3200.txt +brander's pg3200.txt +brandies, pg3200.txt +branding pg3200.txt +branding-iron pg3200.txt +brandon pg31100.txt, pg100.txt +brandon!" pg31100.txt +brandon's pg31100.txt +brandon, pg31100.txt +brandon," pg31100.txt +brandon- pg100.txt +brandon--or pg31100.txt +brandon. pg31100.txt, pg100.txt +brandon." pg31100.txt +brands pg3200.txt, pg100.txt +brands. pg100.txt +brandt?" pg3200.txt +brandy pg3200.txt +brandy, pg3200.txt +brannan pg3200.txt +bras. pg100.txt +bras? pg100.txt +brash pg3200.txt +brash. 
pg3200.txt +brass pg3200.txt +brass, pg3200.txt, pg100.txt +brass-mounted pg3200.txt +brass-work pg3200.txt +brass. pg3200.txt +brass? pg100.txt +brast" pg3200.txt +brat pg31100.txt +brat? pg100.txt +brav'ry. pg100.txt +brave pg31100.txt, pg3200.txt, pg100.txt +brave! pg100.txt +brave, pg3200.txt, pg100.txt +brave. pg3200.txt +brave." pg3200.txt +brave; pg3200.txt +brave? pg100.txt +braved pg3200.txt +braved, pg100.txt +bravely pg3200.txt, pg100.txt +bravely, pg3200.txt +bravely- pg100.txt +bravely. pg100.txt +bravery pg3200.txt +bravery. pg3200.txt +braves. pg100.txt +bravest pg3200.txt +braving pg100.txt +bravo, pg31100.txt +brawl pg100.txt +brawl'd pg100.txt +brawl, pg100.txt +brawl. pg3200.txt, pg100.txt +brawl? pg100.txt +brawler pg3200.txt +brawls. pg100.txt +brawn pg31100.txt +brawn, pg100.txt +bray pg100.txt +bray, pg100.txt +brayed, pg3200.txt +braying pg3200.txt +brays." pg3200.txt +brayton, pg3200.txt +braz'd pg100.txt +brazen, pg3200.txt +brazenest, pg3200.txt +brazier pg100.txt +brazil, pg3200.txt +brazilian pg3200.txt +brazilians pg3200.txt +breach pg31100.txt, pg3200.txt, pg100.txt +breach! pg100.txt +breach, pg100.txt +breach. pg100.txt +breach; pg31100.txt +bread pg31100.txt, pg3200.txt, pg100.txt +bread, pg3200.txt, pg100.txt +bread-and-butter; pg3200.txt +bread-crumbs; pg3200.txt +bread. pg3200.txt, pg100.txt +bread; pg100.txt +bread? pg100.txt +breadfruit, pg3200.txt +breadth pg3200.txt, pg100.txt +breadth? pg100.txt +break pg31100.txt, pg3200.txt, pg100.txt +break! pg100.txt +break, pg3200.txt, pg100.txt +break. pg3200.txt, pg100.txt +break." pg3200.txt +break; pg100.txt +breakages, pg3200.txt +breakers, pg3200.txt +breaketh." pg3200.txt +breakfast pg31100.txt, pg3200.txt, pg100.txt +breakfast!" pg3200.txt +breakfast, pg31100.txt, pg3200.txt, pg100.txt +breakfast-room pg31100.txt, pg3200.txt +breakfast-room, pg31100.txt +breakfast-room. pg31100.txt +breakfast-table pg3200.txt +breakfast-table--made pg31100.txt +breakfast-time, pg31100.txt +breakfast. pg31100.txt, pg3200.txt, pg100.txt +breakfast." pg3200.txt +breakfast; pg100.txt +breakfast?" pg3200.txt +breakfasted pg3200.txt +breakfasted? pg31100.txt +breakfasting pg31100.txt +breakfasts pg3200.txt +breaking pg31100.txt, pg3200.txt +breaking, pg3200.txt, pg100.txt +breaking. pg3200.txt, pg100.txt +breaking; pg100.txt +breaking? pg100.txt +breaks pg3200.txt, pg100.txt +breaks, pg3200.txt +breaks--and pg3200.txt +breaks. pg100.txt +breaks; pg100.txt +breaks? pg100.txt +breast pg31100.txt, pg3200.txt, pg100.txt +breast! pg100.txt +breast!" pg3200.txt +breast, pg3200.txt, pg100.txt +breast- pg3200.txt, pg100.txt +breast--one pg3200.txt +breast-pin pg3200.txt +breast-pin, pg3200.txt +breast. pg3200.txt, pg100.txt +breast." pg3200.txt +breast."--ibid. pg3200.txt +breast; pg3200.txt, pg100.txt +breast? pg100.txt +breast?" pg3200.txt +breast] pg100.txt +breasted pg3200.txt, pg100.txt +breastpin, pg3200.txt +breastplate pg3200.txt +breastplate, pg3200.txt +breastplate. pg3200.txt +breasts pg3200.txt, pg100.txt +breasts, pg100.txt +breasts. pg3200.txt, pg100.txt +breastworks pg3200.txt +breath pg31100.txt, pg3200.txt, pg100.txt +breath! pg100.txt +breath'd pg100.txt +breath'd, pg100.txt +breath'd. pg100.txt +breath, pg31100.txt, pg3200.txt, pg100.txt +breath-- pg3200.txt +breath--but pg3200.txt +breath--then pg3200.txt +breath. pg31100.txt, pg3200.txt, pg100.txt +breath." pg3200.txt +breath.' pg100.txt +breath: pg3200.txt, pg100.txt +breath; pg31100.txt, pg3200.txt, pg100.txt +breath? 
pg100.txt +breathe pg31100.txt, pg3200.txt, pg100.txt +breathe! pg100.txt +breathe, pg3200.txt, pg100.txt +breathe. pg31100.txt, pg3200.txt +breathe; pg100.txt +breathe? pg100.txt +breathed pg3200.txt +breather. pg100.txt +breathes pg3200.txt, pg100.txt +breathes, pg100.txt +breathes. pg100.txt +breathest, pg100.txt +breathing pg3200.txt, pg100.txt +breathing, pg3200.txt +breathing. pg3200.txt +breathing; pg3200.txt, pg100.txt +breathings pg3200.txt +breathings. pg3200.txt +breathless pg31100.txt, pg3200.txt, pg100.txt +breathless, pg3200.txt +breaths pg3200.txt +breaths, pg100.txt +breaths. pg100.txt +breaths; pg100.txt +breckinridge, pg3200.txt +bred pg3200.txt, pg100.txt +bred! pg100.txt +bred, pg100.txt +bred. pg100.txt +bred; pg100.txt +breech-clout; pg3200.txt +breeches pg3200.txt +breeches, pg3200.txt, pg100.txt +breeches-ball. pg31100.txt +breeches-button. pg3200.txt +breeches? pg100.txt +breed pg3200.txt, pg100.txt +breed! pg3200.txt +breed, pg3200.txt, pg100.txt +breed- pg100.txt +breed--a pg3200.txt +breed. pg3200.txt, pg100.txt +breed." pg3200.txt +breed?' pg3200.txt +breeder pg100.txt +breeding pg3200.txt, pg100.txt +breeding!" pg31100.txt +breeding, pg100.txt +breeding- pg100.txt +breeding. pg31100.txt, pg100.txt +breeding; pg100.txt +breeds pg3200.txt, pg100.txt +breeds, pg100.txt +breeds. pg100.txt +breeze pg31100.txt, pg3200.txt, pg100.txt +breeze, pg3200.txt +breeze--and pg31100.txt +breeze--the pg3200.txt +breeze. pg3200.txt +breeze; pg3200.txt +breezes pg3200.txt +breezy pg3200.txt +breil. pg3200.txt +brer pg3200.txt +bret pg3200.txt +bretagne." pg3200.txt +bretheren. pg100.txt +brethren pg3200.txt, pg100.txt +brethren, pg3200.txt, pg100.txt +brethren--" pg3200.txt +brethren. pg3200.txt, pg100.txt +brethren." pg3200.txt +brevity. pg3200.txt +brevity.'- pg100.txt +brevity: pg3200.txt +brevity; pg3200.txt +brew pg3200.txt +brew'd. pg100.txt +brewery." pg3200.txt +brewery?" pg3200.txt +brewing pg3200.txt +brib'd pg100.txt +bribe pg3200.txt, pg100.txt +bribe, pg3200.txt, pg100.txt +bribe; pg3200.txt +bribed. pg3200.txt +bribery pg31100.txt +bribery. pg3200.txt +bribes pg100.txt +bric-a-brac pg3200.txt +bric-a-brac, pg3200.txt +bric-a-brac. pg3200.txt +brick pg31100.txt, pg3200.txt, pg100.txt +brick, pg100.txt +brick-a-bracker pg3200.txt +brick-mould. pg3200.txt +brick-pile--i pg3200.txt +brick-red. pg3200.txt +brick. pg3200.txt +brick." pg3200.txt +brick.... pg3200.txt +brick; pg3200.txt +brickbats pg3200.txt +brickbats. pg3200.txt +bricklayer. pg100.txt +bricks pg3200.txt, pg100.txt +bricks, pg3200.txt +bricks," pg3200.txt +bricks--she's pg3200.txt +brickyard pg3200.txt +bride pg31100.txt, pg3200.txt, pg100.txt +bride! pg100.txt +bride's pg3200.txt +bride's. pg3200.txt +bride, pg31100.txt, pg3200.txt, pg100.txt +bride. pg31100.txt, pg100.txt +bride." pg3200.txt +bride.' pg100.txt +bride? pg100.txt +bridegroom's pg3200.txt +bridegroom? pg100.txt +brides pg3200.txt +brides, pg100.txt +bridge pg3200.txt +bridge, pg3200.txt +bridge- pg100.txt +bridge--any pg3200.txt +bridge. pg3200.txt, pg100.txt +bridge." pg3200.txt +bridge; pg3200.txt +bridge? pg100.txt +bridges pg3200.txt +bridges, pg3200.txt +bridget pg31100.txt, pg3200.txt +bridgewater; pg3200.txt +bridle pg3200.txt, pg100.txt +bridle-path pg3200.txt +bridled. pg100.txt +bridles pg3200.txt +brief pg31100.txt, pg3200.txt, pg100.txt +brief! pg100.txt +brief, pg31100.txt, pg3200.txt, pg100.txt +brief. pg3200.txt, pg100.txt +brief: pg100.txt +brief; pg3200.txt, pg100.txt +brief? 
pg100.txt +briefer pg3200.txt +briefest pg3200.txt +briefly pg31100.txt, pg3200.txt +briefly, pg3200.txt +briefly. pg100.txt +briefly: pg3200.txt +brier, pg100.txt +brier; pg100.txt +brierly pg3200.txt +brierly, pg3200.txt +brierly. pg3200.txt +brierly." pg3200.txt +brierly; pg3200.txt +brierly?" pg3200.txt +briers pg100.txt +briers, pg3200.txt, pg100.txt +briers. pg3200.txt +briers? pg3200.txt +brig pg3200.txt +brigade pg3200.txt +brigade," pg3200.txt +brigade. pg3200.txt +brigades pg3200.txt +brigadier pg3200.txt +brigadier, pg3200.txt +brigadier-general pg3200.txt +brigadier. pg3200.txt +brigand, pg3200.txt +brigands pg3200.txt +brigden; pg31100.txt +briggs, pg3200.txt +brigham pg3200.txt +brigham, pg3200.txt +bright pg3200.txt, pg100.txt +bright! pg100.txt +bright's pg3200.txt +bright, pg31100.txt, pg3200.txt, pg100.txt +bright-coloured pg3200.txt +bright. pg3200.txt, pg100.txt +bright." pg31100.txt, pg3200.txt +bright; pg3200.txt, pg100.txt +brighten pg31100.txt +brighten, pg3200.txt +brightened pg31100.txt, pg3200.txt +brightening pg31100.txt +brightening. pg3200.txt +brightens pg3200.txt +brighter pg3200.txt +brighter, pg3200.txt +brighter. pg3200.txt +brightest pg3200.txt +brighthelmstone pg31100.txt +brightly pg3200.txt +brightly, pg3200.txt +brightness pg31100.txt, pg3200.txt, pg100.txt +brighton pg31100.txt +brighton, pg31100.txt +brighton. pg31100.txt +brighton?" pg31100.txt +brilliancies pg3200.txt +brilliancy pg31100.txt, pg3200.txt +brilliancy, pg3200.txt +brilliant pg31100.txt, pg3200.txt +brilliant, pg31100.txt, pg3200.txt +brilliant. pg3200.txt +brilliant; pg31100.txt +brilliantly pg3200.txt +brilliantly. pg3200.txt +brilliants. pg3200.txt +brim pg3200.txt, pg100.txt +brim, pg3200.txt +brim. pg3200.txt, pg100.txt +brimful pg3200.txt +brimless pg3200.txt +brimming pg3200.txt +brims, pg100.txt +brimstone pg3200.txt +brimstone! pg100.txt +brimstone!" pg3200.txt +brimstone. pg3200.txt +brimstone: pg3200.txt +brimstone? pg3200.txt +brine pg100.txt +brine, pg100.txt +bring pg31100.txt, pg3200.txt, pg100.txt +bring'st pg100.txt +bring, pg31100.txt, pg3200.txt, pg100.txt +bring--which, pg3200.txt +bring. pg3200.txt, pg100.txt +bring: pg100.txt +bring? pg100.txt +bringer pg100.txt +bringing pg31100.txt, pg3200.txt, pg100.txt +bringing! pg3200.txt +bringing-up. pg3200.txt +bringing-up; pg100.txt +brings pg3200.txt, pg100.txt +brings, pg31100.txt, pg100.txt +brings. pg3200.txt, pg100.txt +brings; pg31100.txt +brink pg31100.txt, pg3200.txt +brink. pg100.txt +brinsley pg3200.txt +brisbane pg3200.txt +brisben pg3200.txt +brisk pg3200.txt +brisk: pg3200.txt +briskly pg3200.txt +briskly. pg3200.txt +briskly: pg3200.txt +briskness pg3200.txt +bristle pg100.txt +bristled pg3200.txt +bristling pg3200.txt +bristol pg31100.txt, pg100.txt +bristol, pg31100.txt +bristol--maple pg31100.txt +bristol--servant pg3200.txt +britain pg31100.txt, pg3200.txt, pg100.txt +britain, pg3200.txt, pg100.txt +britain. pg3200.txt, pg100.txt +britain." pg3200.txt +britain; pg3200.txt +britaine pg100.txt +britaine, pg100.txt +britaine. pg100.txt +british pg3200.txt +british. pg3200.txt +briton pg3200.txt +briton, pg3200.txt, pg100.txt +briton- pg100.txt +britons pg3200.txt +brittanica.) pg3200.txt +brittannica' pg3200.txt +brittany, pg100.txt +brittany. pg100.txt +brittlest pg3200.txt +bro pg3200.txt +bro, pg3200.txt +bro. pg3200.txt +broach'd pg100.txt +broach'd, pg100.txt +broached. pg3200.txt +broad pg31100.txt, pg3200.txt, pg100.txt +broad! 
pg100.txt +broad, pg3200.txt +broad-horns pg3200.txt +broad-sword, pg3200.txt +broader, pg3200.txt +broadly pg3200.txt +broadside pg3200.txt +broadside, pg3200.txt +broadside: pg3200.txt +broadway pg3200.txt +broadway. pg3200.txt +brobdignag pg3200.txt +brobdingnagians. pg3200.txt +brock! pg100.txt +brogans, pg3200.txt +brogue, pg3200.txt +broidered pg3200.txt +broil pg100.txt +broil'd? pg100.txt +broil, pg100.txt +broil. pg100.txt +broiled pg3200.txt +broiled. pg3200.txt +broiling? pg100.txt +broils pg100.txt +broils, pg100.txt +broils. pg100.txt +broils? pg100.txt +broke pg31100.txt, pg3200.txt, pg100.txt +broke! pg100.txt +broke!" pg3200.txt +broke, pg31100.txt, pg3200.txt, pg100.txt +broke. pg3200.txt, pg100.txt +broke." pg31100.txt, pg3200.txt +broke: pg3200.txt +broken pg31100.txt, pg3200.txt, pg100.txt +broken!" pg3200.txt +broken" pg3200.txt +broken, pg3200.txt, pg100.txt +broken- pg100.txt +broken-down pg3200.txt +broken-hearted, pg3200.txt +broken-hearted. pg3200.txt +broken-hearted; pg3200.txt +broken-winded pg31100.txt +broken. pg31100.txt, pg3200.txt, pg100.txt +broken: pg3200.txt, pg100.txt +broken; pg3200.txt, pg100.txt +broken? pg100.txt +brokenhearted; pg3200.txt +brokenly pg100.txt +broker! pg100.txt +broker'; pg100.txt +broker. pg3200.txt, pg100.txt +brokers pg3200.txt +brokers, pg100.txt +brokers." pg3200.txt +bromley, pg3200.txt +bronchitis. pg3200.txt +broncho. pg3200.txt +brontosaurian. pg3200.txt +bronze pg3200.txt +bronzed pg3200.txt +bronzes pg3200.txt +brooch pg100.txt +brooch, pg100.txt +brooch. pg100.txt +brood pg3200.txt +brood, pg100.txt +brood. pg100.txt +brood; pg100.txt +brooded pg3200.txt +brooding pg3200.txt +brooding, pg3200.txt +brook pg3200.txt, pg100.txt +brook! pg100.txt +brook'd pg100.txt +brook, pg3200.txt, pg100.txt +brook. pg3200.txt, pg100.txt +brook; pg100.txt +brooklyn pg3200.txt +brooklyn, pg3200.txt +brooklyn: pg3200.txt +brooklyn; pg3200.txt +brooks pg3200.txt, pg100.txt +brooks, pg100.txt +brooks--and pg3200.txt +brooks. pg3200.txt +broom pg100.txt +broom, pg3200.txt +broom--importations pg3200.txt +broom-closet, pg3200.txt +brooms pg3200.txt +broomstick pg3200.txt +broth, pg100.txt +brothel pg100.txt +brother pg31100.txt, pg3200.txt, pg100.txt +brother! pg100.txt +brother!" pg31100.txt, pg3200.txt +brother's pg31100.txt, pg3200.txt +brother's, pg31100.txt, pg100.txt +brother's. pg100.txt +brother's." pg31100.txt, pg3200.txt +brother's: pg3200.txt +brother's; pg31100.txt +brother's? pg100.txt +brother) pg3200.txt +brother, pg31100.txt, pg3200.txt, pg100.txt +brother," pg31100.txt +brother- pg3200.txt, pg100.txt +brother-- pg100.txt +brother--"how pg31100.txt +brother--do pg3200.txt +brother--let pg3200.txt +brother--or pg3200.txt +brother--very, pg31100.txt +brother-in- pg3200.txt +brother-in-law pg31100.txt, pg3200.txt +brother-in-law, pg31100.txt +brother-in-law--" pg3200.txt +brother-in-law. pg3200.txt, pg100.txt +brother-like. pg100.txt +brother-love pg100.txt +brother. pg31100.txt, pg3200.txt, pg100.txt +brother." pg31100.txt, pg3200.txt +brother.' pg100.txt +brother: pg3200.txt, pg100.txt +brother; pg31100.txt, pg3200.txt, pg100.txt +brother? pg100.txt +brother?" pg31100.txt, pg3200.txt +brotherhood pg3200.txt, pg100.txt +brotherhood, pg100.txt +brotherhood. pg100.txt +brotherhood? pg3200.txt +brotherly pg31100.txt, pg3200.txt, pg100.txt +brotherly, pg100.txt +brothers pg31100.txt, pg3200.txt, pg100.txt +brothers! pg3200.txt +brothers, pg31100.txt, pg3200.txt, pg100.txt +brothers," pg3200.txt +brothers,' pg3200.txt +brothers. 
pg3200.txt, pg100.txt +brothers." pg31100.txt, pg3200.txt +brothers.") pg3200.txt +brothers; pg100.txt +brothers? pg100.txt +brought pg31100.txt, pg3200.txt, pg100.txt +brought, pg3200.txt, pg100.txt +brought. pg3200.txt, pg100.txt +brought; pg3200.txt +brow pg31100.txt, pg3200.txt, pg100.txt +brow! pg3200.txt +brow, pg3200.txt, pg100.txt +brow. pg100.txt +brow." pg31100.txt, pg3200.txt +brow: pg100.txt +brow; pg3200.txt, pg100.txt +brow? pg100.txt +brown pg31100.txt, pg3200.txt +brown's pg3200.txt +brown, pg31100.txt, pg3200.txt +brown. pg3200.txt, pg100.txt +brown.' pg3200.txt +brown?" pg3200.txt +brown?' pg3200.txt +browne, pg3200.txt +browne. pg3200.txt +browned pg3200.txt +browning pg3200.txt +browning. pg3200.txt +browns pg3200.txt +browns. pg3200.txt +brows pg3200.txt, pg100.txt +brows, pg3200.txt, pg100.txt +brows- pg100.txt +brows. pg100.txt +brows; pg100.txt +browse pg100.txt +bruder. pg3200.txt +bruis'd pg100.txt +bruise pg3200.txt +bruise- pg100.txt +bruise; pg100.txt +bruised pg3200.txt +bruised, pg3200.txt +bruiser pg3200.txt +bruising pg100.txt +bruit pg3200.txt +bruited, pg100.txt +brummagem. pg3200.txt +brundusium pg100.txt +brunelleschi pg3200.txt +brunette--but pg3200.txt +brung pg3200.txt +brunswick pg31100.txt +brunt pg31100.txt +brusac, pg3200.txt +brush pg3200.txt, pg100.txt +brush'd pg100.txt +brush, pg3200.txt +brush. pg3200.txt +brush; pg3200.txt +brushed pg3200.txt +brushed, pg3200.txt +brushes pg3200.txt +brushes. pg3200.txt +brusque pg3200.txt +brusque, pg3200.txt +brusquely: pg3200.txt +brussels" pg3200.txt +brutal pg3200.txt +brutal, pg31100.txt +brutality pg3200.txt +brutalized pg3200.txt +brute pg3200.txt +brute!" pg3200.txt +brutes pg3200.txt +brutes, pg3200.txt +brutes--now pg3200.txt +brutes. pg3200.txt +brutes; pg3200.txt +brutus pg3200.txt, pg100.txt +brutus! pg100.txt +brutus, pg100.txt +brutus- pg100.txt +brutus. pg100.txt +brutus; pg100.txt +brutus? pg100.txt +brutus] pg100.txt +bryan--they pg3200.txt +bryant. pg3200.txt +bryerson, pg3200.txt +bu'st pg3200.txt +bubble pg3200.txt, pg100.txt +bubble, pg100.txt +bubble. pg3200.txt, pg100.txt +bucanier pg3200.txt +buccaneer? pg3200.txt +buccaneers pg3200.txt +buccaneers." pg3200.txt +buccaneers.' pg3200.txt +buccleugh pg3200.txt +buch. pg3200.txt +buchanan's pg3200.txt +buchu;" pg3200.txt +buck pg3200.txt +buck! pg100.txt +buck!" pg3200.txt +buck!' pg3200.txt +buck's pg3200.txt +buck, pg3200.txt +buck-basket! pg100.txt +buck-basket- pg100.txt +buck-basket. pg100.txt +buck-washing. pg100.txt +buck. pg3200.txt +buck." pg3200.txt +buck; pg3200.txt, pg100.txt +buck?" pg3200.txt +buck?--land?" pg3200.txt +bucker--i pg3200.txt +bucket pg3200.txt +bucket--" pg3200.txt +bucket. pg3200.txt, pg100.txt +buckets pg3200.txt +buckhounds! pg3200.txt +buckingham pg100.txt +buckingham! pg100.txt +buckingham, pg100.txt +buckingham- pg100.txt +buckingham. pg100.txt +buckingham; pg100.txt +buckingham? pg100.txt +buckle pg3200.txt +buckler pg100.txt +bucklers. pg100.txt +buckles, pg100.txt +buckles. pg3200.txt +buckner's pg3200.txt +buckner, pg3200.txt +buckner. pg3200.txt +buckner; pg3200.txt +buckram pg100.txt +buckram? pg100.txt +bucks pg3200.txt +bucksheesh pg3200.txt +bucksheesh!" pg3200.txt +bucksheesh, pg3200.txt +bucksheesh. pg3200.txt +buckshot, pg3200.txt +buckskin pg3200.txt +buckstone pg3200.txt +buckstone, pg3200.txt +buckstone--tom pg3200.txt +buckstone. pg3200.txt +buckstone." pg3200.txt +buckstone: pg3200.txt +bud pg3200.txt, pg100.txt +bud, pg3200.txt, pg100.txt +bud. 
pg100.txt +bud: pg100.txt +budded pg3200.txt +buddhist, pg3200.txt +buddhists, pg3200.txt +budding pg3200.txt +budge pg3200.txt, pg100.txt +budge, pg3200.txt +budge. pg3200.txt, pg100.txt +budge.' pg3200.txt +budged, pg3200.txt +budged." pg3200.txt +budget, pg100.txt +budget-- pg3200.txt +buds pg3200.txt, pg100.txt +buds, pg100.txt +buds; pg100.txt +buff pg3200.txt +buff-grey pg3200.txt +buff; pg3200.txt, pg100.txt +buffalo pg3200.txt +buffalo! pg3200.txt +buffalo. pg3200.txt +buffet pg100.txt +buffets pg100.txt +bug pg3200.txt +bug, pg3200.txt +bug. pg3200.txt +bug." pg3200.txt +buggies, pg3200.txt +buggy pg3200.txt +buggy. pg3200.txt +buggy; pg3200.txt +buggy] pg3200.txt +bugle pg3200.txt +bugle-call: pg3200.txt +bugle-calls. pg3200.txt +bugle-note. pg3200.txt +bugler, pg3200.txt +bugles pg3200.txt +bugles, pg3200.txt +bugles. pg3200.txt +bugs pg3200.txt +bugs. pg100.txt +bugs; pg3200.txt +build pg31100.txt, pg3200.txt, pg100.txt +build, pg100.txt +build. pg3200.txt +build: pg100.txt +build?" pg3200.txt +builded pg3200.txt, pg100.txt +building pg31100.txt, pg3200.txt +building, pg31100.txt, pg3200.txt +building-- pg3200.txt +building--massive pg3200.txt +building. pg3200.txt, pg100.txt +building." pg3200.txt +building; pg3200.txt +building?" pg31100.txt +buildings pg31100.txt, pg3200.txt +buildings! pg31100.txt +buildings, pg31100.txt, pg3200.txt, pg100.txt +buildings. pg31100.txt, pg3200.txt, pg100.txt +builds pg3200.txt +built pg31100.txt, pg3200.txt, pg100.txt +built, pg31100.txt, pg3200.txt +built. pg3200.txt +built." pg3200.txt +bulb pg3200.txt +bulge pg3200.txt +bulged pg3200.txt +bulging pg3200.txt +bulk pg3200.txt, pg100.txt +bulk! pg100.txt +bulk, pg3200.txt, pg100.txt +bulk. pg3200.txt +bulk; pg3200.txt +bulkhead pg3200.txt +bulkhead, pg3200.txt +bull pg3200.txt, pg100.txt +bull! pg100.txt +bull!" pg3200.txt +bull's pg3200.txt +bull, pg3200.txt, pg100.txt +bull-fight." pg3200.txt +bull-fights pg3200.txt +bull-headed pg3200.txt +bull. pg100.txt +bull? pg3200.txt +bull?" pg3200.txt +bullcalf pg100.txt +bullcalf, pg100.txt +bullcalf. pg100.txt +bulldog pg3200.txt +bullen pg100.txt +bullen! pg100.txt +bullen's pg100.txt +bullen, pg100.txt +bullen.' pg100.txt +bullen] pg100.txt +bullet pg3200.txt +bullet, pg3200.txt +bullet-hole! pg3200.txt +bullet. pg3200.txt +bullet." pg3200.txt +bulletin-boarder pg3200.txt +bulletin-boards pg3200.txt +bullets pg3200.txt, pg100.txt +bullets. pg100.txt +bullets." pg3200.txt +bullets; pg3200.txt +bullfrog pg3200.txt +bullfrogs pg3200.txt +bullier pg3200.txt +bullies. pg3200.txt +bulliest pg3200.txt +bullion pg3200.txt +bullock pg3200.txt +bullring. pg3200.txt +bulls pg31100.txt +bulls. pg100.txt +bulls; pg100.txt +bully pg3200.txt +bully, pg3200.txt +bully. pg3200.txt, pg100.txt +bully." pg3200.txt +bully; pg3200.txt +bullyragged pg3200.txt +bulmer- pg100.txt +bulwark pg3200.txt +bulwark, pg3200.txt +bulwarks pg100.txt +bulwarks. pg100.txt +bumblebees. pg3200.txt +bump pg3200.txt +bumppo pg3200.txt +bumptiousness. pg3200.txt +bums! pg100.txt +bunch pg3200.txt +bunch, pg3200.txt +bunch. pg3200.txt +bunched pg3200.txt +bunches pg3200.txt +bundelkand pg3200.txt +bundle pg3200.txt +bundle." pg3200.txt +bundle] pg100.txt +bundles pg3200.txt +bundles, pg3200.txt +bundwa pg3200.txt +bung. pg3200.txt +bungalow pg3200.txt +bunghole? pg100.txt +bunker pg3200.txt +bunks, pg3200.txt +bunks. pg3200.txt +bunting. 
pg100.txt +bunyan's pg3200.txt +buoy pg3200.txt, pg100.txt +buoy'd pg100.txt +buoy- pg3200.txt +buoy; pg3200.txt +buoyant pg3200.txt +buoyant, pg3200.txt +buoyantly pg3200.txt +burbank. pg3200.txt +burden pg3200.txt, pg100.txt +burden, pg3200.txt, pg100.txt +burden. pg3200.txt, pg100.txt +burden:] pg100.txt +burdened pg3200.txt, pg100.txt +burdens pg3200.txt +burdens, pg100.txt +burdensome pg3200.txt +burdensome. pg3200.txt +bureaus pg3200.txt +buren. pg3200.txt +burey, pg3200.txt +burg pg3200.txt +burgermeister pg3200.txt +burgess pg3200.txt +burgess!" pg3200.txt +burgess's pg3200.txt +burgess's?' pg3200.txt +burgess. pg3200.txt +burgess." pg3200.txt +burgess: pg3200.txt +burgess? pg3200.txt +burgh pg100.txt +burglar pg3200.txt +burglar!" pg3200.txt +burglar, pg3200.txt +burglar-time." pg3200.txt +burglaring pg3200.txt +burglars pg3200.txt +burglars, pg3200.txt +burglars. pg3200.txt +burglary pg3200.txt +burglary," pg3200.txt +burgomaster. pg3200.txt +burgonet, pg100.txt +burgundian pg3200.txt +burgundians pg3200.txt +burgundy pg3200.txt, pg100.txt +burgundy! pg100.txt +burgundy, pg3200.txt, pg100.txt +burgundy. pg3200.txt, pg100.txt +burgundy." pg3200.txt +burgundy; pg100.txt +burgundy? pg3200.txt, pg100.txt +burial pg3200.txt +burial, pg3200.txt, pg100.txt +burial-grounds pg3200.txt +burial. pg100.txt +burial; pg3200.txt, pg100.txt +burials pg3200.txt +buried pg3200.txt, pg100.txt +buried! pg100.txt +buried, pg3200.txt, pg100.txt +buried- pg100.txt +buried. pg3200.txt, pg100.txt +burke, pg3200.txt +burlesque pg3200.txt +burlesque. pg31100.txt, pg3200.txt +burlesqued pg3200.txt +burlesqued. pg3200.txt +burlesques pg3200.txt +burley. pg3200.txt +burley; pg3200.txt +burlingame, pg3200.txt +burly pg3200.txt +burn pg3200.txt, pg100.txt +burn'd?' pg100.txt +burn, pg3200.txt, pg100.txt +burn. pg3200.txt, pg100.txt +burn." pg3200.txt +burn: pg100.txt +burn; pg3200.txt +burnaby, pg3200.txt +burned pg3200.txt +burned, pg3200.txt, pg100.txt +burned--and pg3200.txt +burned. pg3200.txt +burned." pg3200.txt +burned; pg3200.txt +burning pg3200.txt, pg100.txt +burning, pg3200.txt, pg100.txt +burning-ghat. pg3200.txt +burning. pg3200.txt, pg100.txt +burning." pg3200.txt +burning; pg100.txt +burns pg3200.txt, pg100.txt +burns, pg3200.txt, pg100.txt +burns. pg100.txt +burns." pg3200.txt +burnt pg3200.txt, pg100.txt +burnt!" pg3200.txt +burnt, pg3200.txt, pg100.txt +burnt-in, pg3200.txt +burnt. pg3200.txt, pg100.txt +burnt; pg3200.txt, pg100.txt +burr pg3200.txt +burr. pg3200.txt +burr? pg3200.txt +burrow pg3200.txt +burs pg100.txt +burs, pg100.txt +burst pg31100.txt, pg3200.txt, pg100.txt +burst! pg3200.txt, pg100.txt +burst, pg3200.txt +burst. pg3200.txt, pg100.txt +burst? pg100.txt +bursted pg3200.txt +bursting pg3200.txt +bursts pg3200.txt +bursts, pg31100.txt +burthen! pg100.txt +burthen, pg3200.txt +burton pg3200.txt +bury pg31100.txt, pg3200.txt, pg100.txt +bury, pg3200.txt, pg100.txt +buryin' pg3200.txt +buryin'-ground, pg3200.txt +burying pg31100.txt, pg3200.txt +burying--" pg3200.txt +burying-place, pg3200.txt +burying. pg3200.txt +burying?" pg3200.txt +bush pg3200.txt, pg100.txt +bush, pg100.txt +bush. pg100.txt +bush." pg3200.txt +bush; pg100.txt +bushel pg3200.txt +bushel, pg3200.txt +bushel." pg3200.txt +bushels pg3200.txt +bushes pg3200.txt +bushes, pg3200.txt +bushes--turned pg3200.txt +bushes." pg3200.txt +bushes; pg100.txt +bushnell. pg3200.txt +bushy pg3200.txt, pg100.txt +busied pg100.txt +busine&s. pg3200.txt +busines pg100.txt +business pg31100.txt, pg3200.txt, pg100.txt +business! 
pg3200.txt, pg100.txt +business!' pg100.txt +business, pg31100.txt, pg3200.txt, pg100.txt +business- pg100.txt +business--and pg3200.txt +business--there pg3200.txt +business--very pg3200.txt +business--yes." pg3200.txt +business-brain pg3200.txt +business-eye, pg3200.txt +business-head, pg3200.txt +business-talent pg3200.txt +business. pg31100.txt, pg3200.txt, pg100.txt +business." pg31100.txt, pg3200.txt +business.' pg3200.txt +business._] pg31100.txt +business: pg100.txt +business; pg31100.txt, pg3200.txt, pg100.txt +business? pg3200.txt, pg100.txt +business?" pg3200.txt +businesses, pg3200.txt +businesses; pg100.txt +businesslike pg3200.txt +businesslike, pg3200.txt +buskins, pg3200.txt +busses. pg100.txt +bust pg3200.txt +bust, pg3200.txt +bust. pg3200.txt +bust?" pg3200.txt +busted pg3200.txt +busted, pg3200.txt +busted." pg3200.txt +bustle pg31100.txt, pg3200.txt +bustle, pg31100.txt +bustled pg3200.txt +bustles pg31100.txt +bustling pg3200.txt +busts, pg3200.txt +busts. pg3200.txt +busy pg31100.txt, pg3200.txt, pg100.txt +busy, pg3200.txt +busy. pg3200.txt +busying pg3200.txt +but's pg3200.txt +but, pg31100.txt, pg3200.txt, pg100.txt +but- pg100.txt +but-- pg3200.txt +but--" pg31100.txt, pg3200.txt +but--' pg3200.txt +but----" pg3200.txt +but--but-- pg3200.txt +but--but--" pg3200.txt +but--but--we pg3200.txt +but--er--well, pg3200.txt +but--if pg3200.txt +but--it pg3200.txt +but--well, pg3200.txt +but-little pg3200.txt +butcheed, pg100.txt +butcher pg31100.txt, pg3200.txt +butcher!" pg3200.txt +butcher'd, pg100.txt +butcher'd. pg100.txt +butcher's pg31100.txt, pg3200.txt +butcher, pg100.txt +butcher- pg100.txt +butcher. pg100.txt +butcher." pg3200.txt +butchered. pg100.txt +butchered; pg100.txt +butcheries. pg100.txt +butcheries? pg3200.txt +butchering, pg3200.txt +butcherly, pg100.txt +butchers pg3200.txt +butchers! pg100.txt +butchers, pg3200.txt +butchery, pg100.txt +butchery. pg3200.txt +butchery; pg100.txt +butler pg31100.txt, pg3200.txt, pg100.txt +butler, pg31100.txt, pg3200.txt +butler: pg31100.txt +butler? pg100.txt +butlers, pg3200.txt +butt pg100.txt +butt-shaft; pg100.txt +butted, pg3200.txt +butter pg3200.txt +butter, pg31100.txt, pg3200.txt +butter-timber pg3200.txt +butter. pg3200.txt, pg100.txt +butter? pg100.txt +butter?" pg3200.txt +butterflies pg3200.txt +butterflies, pg3200.txt, pg100.txt +butterflies. pg3200.txt +butterfly, pg3200.txt +butterfly. pg3200.txt +buttermilk, pg3200.txt +buttermilk--that pg3200.txt +butters pg3200.txt +butters--i pg3200.txt +buttery, pg100.txt +buttock. pg100.txt +button pg3200.txt +button, pg3200.txt +button-holes; pg3200.txt +button. pg3200.txt, pg100.txt +button; pg3200.txt +buttonhole pg3200.txt +buttonhole- pg3200.txt +buttonhole. pg3200.txt +buttonholed pg3200.txt +buttons pg3200.txt +buttons, pg3200.txt +buttons. pg3200.txt +buttons." pg3200.txt +buttons; pg3200.txt +buttressed pg3200.txt +butts pg100.txt +butts, pg100.txt +butts? pg100.txt +buxom pg3200.txt +buy pg31100.txt, pg3200.txt, pg100.txt +buy'em? pg100.txt +buy, pg3200.txt, pg100.txt +buy. pg3200.txt, pg100.txt +buy; pg100.txt +buy? pg3200.txt +buy?" pg3200.txt +buyer pg3200.txt +buying pg31100.txt, pg3200.txt +buyings, pg31100.txt +buys pg3200.txt, pg100.txt +buzz pg3200.txt +buzz! pg100.txt +buzzard pg3200.txt +buzzard's pg3200.txt +buzzard's. pg3200.txt +buzzard. pg3200.txt, pg100.txt +buzzard; pg3200.txt +buzzed pg3200.txt +buzzing pg3200.txt +buzzing, pg3200.txt +by! pg3200.txt, pg100.txt +by't. 
pg100.txt +by, pg31100.txt, pg3200.txt, pg100.txt +by," pg3200.txt +by,' pg100.txt +by- pg3200.txt, pg100.txt +by-- pg3200.txt +by--, pg3200.txt +by--before pg3200.txt +by--but pg31100.txt +by--such pg3200.txt +by--will pg3200.txt +by-and-by pg3200.txt +by-and-by, pg3200.txt +by-and-by. pg100.txt +by-and-by; pg100.txt +by-and-bye pg3200.txt +by-and-bye, pg3200.txt +by-dependences, pg100.txt +by-law pg3200.txt +by-law, pg3200.txt +by-laws pg3200.txt +by-laws) pg3200.txt +by-laws, pg3200.txt +by-laws. pg3200.txt +by-play pg3200.txt +by-the-by, pg31100.txt +by-the-bye, pg31100.txt +by-trips pg3200.txt +by-ways, pg3200.txt +by. pg31100.txt, pg3200.txt, pg100.txt +by." pg31100.txt, pg3200.txt +by: pg3200.txt, pg100.txt +by; pg3200.txt, pg100.txt +by? pg3200.txt, pg100.txt +by?" pg31100.txt, pg3200.txt +bye pg3200.txt, pg100.txt +bye, pg31100.txt, pg3200.txt +bye," pg31100.txt +bye-bye! pg3200.txt +bye. pg3200.txt +bye." pg31100.txt +bygone pg3200.txt +bygones pg3200.txt +bygones. pg3200.txt +bylaw pg3200.txt +bymeby." pg3200.txt +bynner!" pg3200.txt +byragee. pg3200.txt +byranijee, pg3200.txt +byron." pg3200.txt +bystanders pg31100.txt +bystanders. pg3200.txt +bytimes pg3200.txt +byzantina, pg3200.txt +byzantium pg100.txt +bzjxxllwep pg3200.txt +bzzz-zzz pg3200.txt +c, pg3200.txt +c. pg3200.txt +c." pg31100.txt +c.' pg3200.txt +c., pg31100.txt +c.,89 pg3200.txt +c.: pg3200.txt +c.?" pg3200.txt +c.l. pg31100.txt +c.s. pg3200.txt +ca'm pg3200.txt +ca'm, pg3200.txt +ca'm. pg3200.txt +ca'm: pg3200.txt +ca, pg3200.txt +ca-caliban, pg100.txt +cab pg3200.txt +cab-driver pg3200.txt +cab-horses pg3200.txt +cab-substitutes pg3200.txt +cabbage pg3200.txt +cabbage--" pg3200.txt +cabbage. pg3200.txt +cabbaged pg3200.txt +cabbages, pg3200.txt +cabbages. pg3200.txt +cabbaging pg3200.txt +cabin pg3200.txt +cabin). pg3200.txt +cabin, pg3200.txt, pg100.txt +cabin-- pg3200.txt +cabin. pg31100.txt, pg3200.txt, pg100.txt +cabin: pg3200.txt +cabin; pg3200.txt +cabin?" pg3200.txt +cabinet pg3200.txt +cabinet's pg3200.txt +cabinet, pg3200.txt +cabinet. pg3200.txt +cabinets pg3200.txt +cabinets, pg31100.txt +cabins pg3200.txt +cabins, pg3200.txt +cabins--with pg3200.txt +cable pg3200.txt +cable-chains pg3200.txt +cable. pg3200.txt, pg100.txt +cable; pg100.txt +cabled pg3200.txt +cablegram pg3200.txt +cablegram! pg3200.txt +cablegram!" pg3200.txt +cablegram, pg3200.txt +cablegram--" pg3200.txt +cablegram--one pg3200.txt +cables pg3200.txt +cabling pg3200.txt +cabman pg3200.txt +cabs pg3200.txt +cabs, pg3200.txt +cackle pg3200.txt +cackles pg3200.txt +cackles. pg3200.txt +cackling. pg3200.txt +cactus pg3200.txt +cadaver pg3200.txt +cadaver. pg3200.txt +caddis-garter, pg100.txt +cade pg100.txt +cade! pg100.txt +cade, pg100.txt +cade; pg100.txt +cadences, pg3200.txt +cadesses? pg3200.txt +cadet pg3200.txt +cadet, pg3200.txt +cadge pg3200.txt +cadiz, pg3200.txt +cadiz--" pg3200.txt +cadiz. pg3200.txt +cadwal pg100.txt +cadwal, pg100.txt +caelo, pg100.txt +caesar pg3200.txt, pg100.txt +caesar! pg100.txt +caesar!" pg100.txt +caesar!'] pg100.txt +caesar's pg100.txt +caesar's, pg100.txt +caesar's. pg100.txt +caesar, pg3200.txt, pg100.txt +caesar- pg100.txt +caesar. pg3200.txt, pg100.txt +caesar: pg100.txt +caesar; pg100.txt +caesar? pg100.txt +caesars pg100.txt +cafe.] pg3200.txt +cage pg3200.txt, pg100.txt +cage, pg3200.txt, pg100.txt +cage. pg100.txt +cage?" pg3200.txt +cain pg3200.txt, pg100.txt +cain" pg3200.txt +cain't pg3200.txt +cain, pg100.txt +cain. pg3200.txt +cain." 
pg3200.txt +caiques, pg3200.txt +cairo pg3200.txt +cairo, pg3200.txt +cairo,--whence pg3200.txt +cairo--shepheard's pg3200.txt +cairo. pg3200.txt +cairo.' pg3200.txt +cairo?" pg3200.txt +caithness, pg100.txt +caitiff! pg100.txt +caitiff. pg100.txt +caitiff? pg100.txt +caius pg100.txt +caius! pg100.txt +caius, pg100.txt +caius. pg100.txt +caius? pg100.txt +cake pg3200.txt, pg100.txt +cake, pg3200.txt +cake-pans. pg3200.txt +cake-walk: pg3200.txt +cake. pg3200.txt +cake." pg3200.txt +caked pg3200.txt +cakes--for pg3200.txt +cakes. pg3200.txt, pg100.txt +cakewalk. pg3200.txt +cal-e-va-ras] pg3200.txt +cal." pg3200.txt +calaboose pg3200.txt +calaboose?' pg3200.txt +calais pg100.txt +calais, pg100.txt +calais. pg100.txt +calais; pg100.txt +calamities. pg100.txt +calamitous pg3200.txt +calamity pg3200.txt, pg100.txt +calamity!" pg3200.txt +calamity, pg3200.txt, pg100.txt +calamity. pg3200.txt, pg100.txt +calamity." pg3200.txt +calamity; pg3200.txt +calamity?" pg3200.txt +calamus pg3200.txt +calaveras pg3200.txt +calaveras. pg3200.txt +calaveras." pg3200.txt +calaveras.' pg3200.txt +calchas pg100.txt +calculate pg3200.txt +calculate, pg31100.txt, pg3200.txt, pg100.txt +calculated pg31100.txt, pg3200.txt +calculated, pg3200.txt +calculating, pg3200.txt +calculating--by pg3200.txt +calculation, pg31100.txt +calculation. pg31100.txt, pg3200.txt +calculations pg31100.txt, pg3200.txt +calculations, pg31100.txt, pg3200.txt +calculations. pg31100.txt, pg3200.txt +calculations; pg3200.txt +calcutta pg3200.txt +calcutta, pg3200.txt +calcutta. pg3200.txt +caledonia), pg3200.txt +calendar pg3200.txt +calendar! pg100.txt +calendar. pg3200.txt, pg100.txt +calendar? pg100.txt +calf pg3200.txt +calf, pg100.txt +calf. pg3200.txt, pg100.txt +calf; pg3200.txt +calf? pg100.txt +calhoun, pg3200.txt +caliban pg100.txt +caliban! pg100.txt +caliban, pg100.txt +caliban. pg100.txt +calibans. pg100.txt +calibre pg3200.txt +calico pg3200.txt +calico. pg3200.txt +california pg3200.txt +california" pg3200.txt +california, pg3200.txt +california--and pg3200.txt +california. pg3200.txt +california." pg3200.txt +california: pg3200.txt +californian pg3200.txt +californian, pg3200.txt +californian. pg3200.txt +californian.' pg3200.txt +californians, pg3200.txt +calipolis. pg100.txt +caliver pg100.txt +calk pg3200.txt +call pg31100.txt, pg3200.txt, pg100.txt +call! pg100.txt +call'd pg100.txt +call'd, pg100.txt +call'd. pg100.txt +call'd? pg100.txt +call, pg31100.txt, pg3200.txt, pg100.txt +call--one--two--" pg3200.txt +call-boy; pg3200.txt +call-boying pg3200.txt +call. pg31100.txt, pg3200.txt, pg100.txt +call." pg3200.txt +call: pg3200.txt +call; pg31100.txt, pg3200.txt, pg100.txt +call? pg100.txt +call?" pg31100.txt, pg3200.txt +call] pg3200.txt +calla. pg3200.txt +callat pg100.txt +called pg31100.txt, pg3200.txt, pg100.txt +called, pg31100.txt, pg3200.txt, pg100.txt +called-- pg3200.txt +called. pg3200.txt +called." pg31100.txt, pg3200.txt +called: pg3200.txt +called; pg3200.txt +called?" pg3200.txt +callers. pg3200.txt +callet. pg100.txt +calling pg31100.txt, pg3200.txt, pg100.txt +calling. pg31100.txt, pg3200.txt +calling." pg31100.txt +calling? pg100.txt +calling?" pg3200.txt +callinger. pg3200.txt +callings pg3200.txt +callings, pg3200.txt +callow pg3200.txt +calls pg31100.txt, pg3200.txt +calls! pg100.txt +calls, pg31100.txt, pg100.txt +calls- pg100.txt +calls. pg31100.txt, pg3200.txt, pg100.txt +calls: pg100.txt +calls; pg100.txt +calls? 
pg100.txt +calm pg31100.txt, pg3200.txt +calm'd pg100.txt +calm'd; pg100.txt +calm, pg31100.txt, pg3200.txt, pg100.txt +calm--what pg3200.txt +calm. pg31100.txt, pg3200.txt, pg100.txt +calm." pg3200.txt +calmer pg3200.txt +calmer. pg31100.txt +calmest pg3200.txt +calmly pg31100.txt, pg3200.txt +calmly, pg31100.txt +calmly. pg31100.txt, pg3200.txt +calmly: pg3200.txt +calmness pg31100.txt, pg3200.txt +calmness, pg31100.txt +calmness. pg31100.txt, pg3200.txt +calmness: pg3200.txt +calmness; pg31100.txt +calms pg3200.txt +calms, pg100.txt +calms,--that pg3200.txt +calpurnia! pg100.txt +calpurnia. pg100.txt +calumniate. pg100.txt +calumnies?" pg3200.txt +calumny pg100.txt +calvary, pg3200.txt +calvin, pg3200.txt +calvinists, pg3200.txt +calydon. pg100.txt +cam'st pg100.txt +cam'st, pg100.txt +cam'st- pg100.txt +cambenet, pg3200.txt +cambio pg100.txt +cambio. pg100.txt +cambio? pg100.txt +cambrics, pg100.txt +cambridge pg3200.txt +cambridge, pg31100.txt, pg100.txt +cambridge. pg3200.txt, pg100.txt +camden pg31100.txt +came pg31100.txt, pg3200.txt, pg100.txt +came!" pg3200.txt +came, pg31100.txt, pg3200.txt, pg100.txt +came--and pg3200.txt +came--i pg31100.txt +came--one pg3200.txt +came. pg31100.txt, pg3200.txt, pg100.txt +came." pg3200.txt +came.) pg3200.txt +came: pg3200.txt +came; pg31100.txt, pg3200.txt, pg100.txt +came? pg3200.txt, pg100.txt +came?- pg100.txt +camel pg3200.txt, pg100.txt +camel! pg100.txt +camel's pg3200.txt +camel's, pg3200.txt +camel's. pg3200.txt +camel, pg3200.txt +camel-driver pg3200.txt +camel-driver. pg3200.txt +camel-refuse, pg3200.txt +camel. pg3200.txt +camel? pg100.txt +camel?" pg3200.txt +cameleopard, pg3200.txt +camellia pg3200.txt +camelot pg3200.txt +camelot, pg3200.txt +camelot. pg3200.txt, pg100.txt +camelot." pg3200.txt +camelot; pg3200.txt +camels pg3200.txt +camels, pg3200.txt +camels--but pg3200.txt +camels. pg3200.txt +camels." pg3200.txt +camels?" pg3200.txt +camilla?" pg31100.txt +camillo pg100.txt +camillo, pg100.txt +camillo- pg100.txt +camillo. pg100.txt +camillo; pg100.txt +camillo? pg100.txt +camillo] pg100.txt +camp pg3200.txt, pg100.txt +camp, pg3200.txt, pg100.txt +camp- pg100.txt +camp--" pg3200.txt +camp--in pg3200.txt +camp-chairs pg3200.txt +camp-fire pg3200.txt +camp-fire, pg3200.txt +camp-fire. pg3200.txt +camp-meetin's, pg3200.txt +camp-meeting pg3200.txt +camp-meeting. pg3200.txt +camp. pg3200.txt, pg100.txt +camp." pg3200.txt +camp; pg3200.txt, pg100.txt +camp? pg100.txt +camp?" pg3200.txt +campagna pg3200.txt +campaign pg3200.txt +campaign, pg3200.txt +campaign. pg3200.txt +campaigner pg3200.txt +campaigners, pg3200.txt +campaigns pg3200.txt +campaigns, pg3200.txt +campanile pg3200.txt +campbell pg31100.txt, pg3200.txt +campbell, pg31100.txt +campbell. pg31100.txt, pg3200.txt +campbell." pg31100.txt +campbell.'" pg31100.txt +campbell; pg31100.txt +campbells pg31100.txt +campbells, pg31100.txt +campbells." pg31100.txt +camped pg3200.txt +camped, pg3200.txt +camped?" pg3200.txt +campeius pg100.txt +campeius, pg100.txt +campeius; pg100.txt +camping pg3200.txt +camping- pg3200.txt +camping-place. pg3200.txt +camps pg3200.txt, pg100.txt +camps, pg3200.txt +camps. pg100.txt +camps; pg3200.txt +campwards, pg3200.txt +can!' pg3200.txt +can't pg31100.txt, pg3200.txt +can't!" pg3200.txt +can't, pg3200.txt +can't--as pg3200.txt +can't--not pg3200.txt +can't. pg3200.txt +can't." pg3200.txt +can't.' pg3200.txt +can't? pg3200.txt +can, pg31100.txt, pg3200.txt, pg100.txt +can," pg3200.txt +can--" pg31100.txt, pg3200.txt +can-but pg100.txt +can. 
pg31100.txt, pg3200.txt, pg100.txt +can." pg31100.txt, pg3200.txt +can.' pg3200.txt +can: pg3200.txt, pg100.txt +can; pg31100.txt, pg3200.txt, pg100.txt +can? pg3200.txt +can?" pg3200.txt +can?), pg31100.txt +canaan pg3200.txt +canaanites. pg3200.txt +canada pg3200.txt +canada, pg3200.txt +canada. pg3200.txt +canada: pg3200.txt +canadian pg3200.txt +canadian, pg3200.txt +canal pg3200.txt +canal. pg3200.txt +canals. pg3200.txt +canaries; pg3200.txt +canary pg3200.txt, pg100.txt +canary! pg100.txt +canceled, pg3200.txt +cancelled pg3200.txt +cancer pg3200.txt +cancer, pg3200.txt +candian pg3200.txt +candid pg3200.txt +candid. pg31100.txt +candidate pg3200.txt +candidate, pg3200.txt +candidate. pg3200.txt +candidate; pg3200.txt +candidates pg3200.txt +candidates, pg3200.txt +candidates. pg3200.txt +candied pg100.txt +candle pg31100.txt, pg3200.txt +candle! pg100.txt +candle!" pg3200.txt +candle, pg3200.txt +candle-boxes, pg3200.txt +candle-end, pg3200.txt +candle-grease pg3200.txt +candle-light. pg3200.txt +candle-mould. pg3200.txt +candle-smoke." pg3200.txt +candle-stick pg3200.txt +candle. pg3200.txt, pg100.txt +candle; pg3200.txt +candlelight, pg31100.txt +candlelight. pg31100.txt, pg3200.txt +candles pg31100.txt, pg3200.txt +candles' pg100.txt +candles, pg3200.txt +candles. pg3200.txt +candles?' pg3200.txt +candlesticks pg3200.txt, pg100.txt +candlesticks, pg3200.txt +candor, pg3200.txt +candor: pg3200.txt +candor; pg3200.txt +candour pg31100.txt +candour, pg31100.txt +candour-- pg31100.txt +candy pg3200.txt +candy, pg3200.txt +candy-shop pg3200.txt +candy. pg3200.txt +candy; pg100.txt +cane pg3200.txt +cane, pg3200.txt +cane- pg3200.txt +cane-brakes. pg3200.txt +cane-brakes; pg3200.txt +cane. pg3200.txt +canes, pg3200.txt +canes; pg3200.txt +canidius pg100.txt +canidius, pg100.txt +canidius. pg100.txt +canker-bit. pg100.txt +cankerblossom! pg100.txt +cankered, pg3200.txt +cankers! pg100.txt +canned. pg3200.txt +cannell-bone, pg3200.txt +cannibal. pg3200.txt +cannibals pg3200.txt, pg100.txt +cannibals! pg100.txt +cannibals, pg100.txt +cannon pg3200.txt, pg100.txt +cannon's pg3200.txt +cannon's. pg3200.txt +cannon, pg3200.txt, pg100.txt +cannon-ball pg3200.txt +cannon-balls pg3200.txt +cannon-shot pg100.txt +cannon. pg3200.txt +cannon?" pg3200.txt +cannonade pg3200.txt +cannonblast, pg3200.txt +cannons, pg100.txt +cannot pg31100.txt, pg3200.txt, pg100.txt +cannot!" pg3200.txt +cannot, pg31100.txt, pg3200.txt, pg100.txt +cannot- pg100.txt +cannot--for pg3200.txt +cannot. pg31100.txt, pg3200.txt, pg100.txt +cannot." pg31100.txt, pg3200.txt +canoe pg3200.txt +canoe, pg3200.txt +canoe. pg3200.txt +canoe." pg3200.txt +canoe; pg3200.txt +canoe?" pg3200.txt +canoes, pg3200.txt +canoes--but pg3200.txt +canon pg3200.txt +canon, pg3200.txt, pg100.txt +canon," pg3200.txt +canon. pg100.txt +canonicals. pg3200.txt +canonization, pg3200.txt +canonized. pg3200.txt +canons pg3200.txt +canons, pg3200.txt +canopied pg3200.txt, pg100.txt +canopies pg3200.txt +canopies, pg100.txt +canopy pg100.txt +canopy, pg100.txt +canopy. pg100.txt +canopy? pg100.txt +canova pg3200.txt +cans pg3200.txt +cans, pg3200.txt +canst pg3200.txt, pg100.txt +canst, pg100.txt +canst. pg100.txt +canst; pg100.txt +cant pg3200.txt +canted pg3200.txt +canteens pg3200.txt +canter, pg3200.txt +canterbury pg3200.txt, pg100.txt +canterbury's. pg3200.txt +canterbury, pg100.txt +canterbury. pg3200.txt, pg100.txt +canterbury; pg100.txt +canterbury? pg100.txt +cantering pg3200.txt +canton pg3200.txt +canton. pg3200.txt +canton." 
pg3200.txt +cantons, pg3200.txt +cantons." pg3200.txt +canty pg3200.txt +canty, pg3200.txt +canty--with pg3200.txt +canty. pg3200.txt +canus; pg100.txt +canvas pg3200.txt +canvas, pg3200.txt +canvas--then pg3200.txt +canvas-backs pg3200.txt +canvas-box pg3200.txt +canvas-covered pg3200.txt +canvas. pg3200.txt +canvases pg3200.txt +canvases, pg3200.txt +canvass pg100.txt +canvass, pg3200.txt +canvass. pg3200.txt +canvassed pg31100.txt, pg3200.txt +canvasser. pg3200.txt +canvassers, pg3200.txt +canvassing pg31100.txt +canyon pg3200.txt +canyon, pg3200.txt +canyons, pg3200.txt +cap pg3200.txt, pg100.txt +cap'n pg3200.txt +cap, pg31100.txt, pg3200.txt, pg100.txt +cap-a-pe, pg100.txt +cap. pg3200.txt, pg100.txt +cap., pg3200.txt +cap.] pg100.txt +cap; pg3200.txt, pg100.txt +cap? pg100.txt +capabilities pg31100.txt, pg3200.txt +capabilities, pg31100.txt +capability pg31100.txt +capable pg31100.txt, pg3200.txt, pg100.txt +capable, pg3200.txt +capable--whether pg31100.txt +capable. pg3200.txt, pg100.txt +capable; pg3200.txt +capablest pg3200.txt +capably, pg3200.txt +capacities pg3200.txt, pg100.txt +capacities, pg31100.txt, pg3200.txt +capacities; pg3200.txt +capacity pg3200.txt, pg100.txt +capacity, pg3200.txt +capacity. pg3200.txt, pg100.txt +capacity.' pg3200.txt +caparison'd pg100.txt +caparison. pg100.txt +cape pg3200.txt +cape'- pg100.txt +cape, pg31100.txt, pg3200.txt, pg100.txt +cape. pg100.txt +capello--'" pg3200.txt +capello." pg3200.txt +caper pg100.txt +caper, pg31100.txt +caper. pg100.txt +capered pg3200.txt +capernaum pg3200.txt +capernaum--concerning pg3200.txt +capernaum. pg3200.txt +capernaum; pg3200.txt +capers, pg100.txt +capers; pg100.txt +capes pg3200.txt +capes; pg3200.txt +capet, pg100.txt +caphis pg100.txt +capilet. pg100.txt +capilet.' pg100.txt +capital pg31100.txt, pg3200.txt, pg100.txt +capital, pg3200.txt +capital. pg31100.txt, pg3200.txt +capital?" pg3200.txt +capitalist. pg3200.txt +capitals pg3200.txt +capitol pg3200.txt, pg100.txt +capitol! pg100.txt +capitol, pg3200.txt, pg100.txt +capitol. pg3200.txt, pg100.txt +capitol." pg3200.txt +capitol.--thus pg3200.txt +capitol; pg100.txt +capitol? pg100.txt +capitulate pg100.txt +capon. pg100.txt +capons, pg100.txt +caprell. pg3200.txt +caprice pg3200.txt +caprice, pg3200.txt +caprice; pg3200.txt +caprices pg3200.txt +caprices. pg3200.txt +capricious pg3200.txt +caps pg3200.txt, pg100.txt +caps, pg3200.txt, pg100.txt +caps--where pg3200.txt +caps. pg31100.txt, pg3200.txt +caps; pg3200.txt, pg100.txt +caps] pg100.txt +capsize, pg3200.txt +capstan, pg3200.txt +capt. pg3200.txt +captain pg31100.txt, pg3200.txt, pg100.txt +captain's pg3200.txt +captain) pg3200.txt +captain, pg3200.txt, pg100.txt +captain- pg100.txt +captain. pg3200.txt, pg100.txt +captain." pg3200.txt +captain.' pg3200.txt +captain: pg100.txt +captain; pg3200.txt, pg100.txt +captain? pg100.txt +captain?" pg3200.txt +captain?' pg3200.txt +captain]. pg100.txt +captains pg3200.txt, pg100.txt +captains, pg100.txt +captains. pg3200.txt +captains?" pg3200.txt +captious pg31100.txt +captiously pg31100.txt +captivate pg100.txt +captivate. pg100.txt +captivates! pg100.txt +captivating pg31100.txt, pg3200.txt +captive pg3200.txt, pg100.txt +captive, pg3200.txt +captive. pg3200.txt +captive; pg100.txt +captive? pg100.txt +captives pg100.txt +captives. pg3200.txt +captives; pg3200.txt +captivity pg3200.txt +captivity!" pg3200.txt +captivity, pg3200.txt, pg100.txt +captivity. pg3200.txt, pg100.txt +capture pg3200.txt +capture, pg3200.txt +capture; pg3200.txt +capture?" 
pg3200.txt +captured pg3200.txt +captured, pg3200.txt +captured--enjoying pg3200.txt +captured. pg3200.txt +captured; pg3200.txt +captures pg3200.txt +captures, pg31100.txt +capturing pg3200.txt +capucius pg100.txt +capucius. pg100.txt +capulet's pg100.txt +capulet, pg100.txt +capulet. pg100.txt +capulet; pg100.txt +capulet? pg100.txt +capulets. pg100.txt +capulets; pg100.txt +car pg3200.txt, pg100.txt +car'd pg100.txt +car, pg3200.txt, pg100.txt +car--enjoyed pg3200.txt +car. pg3200.txt, pg100.txt +car." pg3200.txt +car] pg100.txt +caracalla, pg3200.txt +carack; pg100.txt +caracks pg100.txt +carados pg3200.txt +carados, pg3200.txt +carat pg3200.txt +carat, pg100.txt +caravan pg3200.txt +caravan, pg3200.txt +caravan. pg3200.txt +caravans. pg3200.txt +caravanserai pg3200.txt +caravanserai, pg3200.txt +carbon pg3200.txt +carbonado'd. pg100.txt +carbonado. pg100.txt +carbuncle pg100.txt +carbuncled pg100.txt +carbuncles, pg3200.txt +carbuncles. pg3200.txt +carcanet, pg100.txt +carcanet. pg100.txt +carcases pg3200.txt +carcass pg3200.txt, pg100.txt +carcass?" pg3200.txt +card pg31100.txt, pg3200.txt, pg100.txt +card, pg3200.txt, pg100.txt +card--louisiana." pg3200.txt +card-parties. pg31100.txt +card-player." pg31100.txt +card-room. pg31100.txt +card-table pg31100.txt +card-table, pg31100.txt +card-table. pg31100.txt +card-tables pg31100.txt +card. pg31100.txt, pg3200.txt, pg100.txt +card?" pg3200.txt +cardecue pg100.txt +cardiff pg3200.txt +cardinal pg31100.txt, pg3200.txt, pg100.txt +cardinal! pg100.txt +cardinal's pg100.txt +cardinal's; pg100.txt +cardinal's? pg100.txt +cardinal, pg100.txt +cardinal- pg100.txt +cardinal. pg100.txt +cardinal; pg100.txt +cardinal? pg100.txt +cardinal] pg100.txt +cardinalate pg3200.txt +cardinals pg100.txt +cardinals, pg3200.txt, pg100.txt +cards pg31100.txt, pg3200.txt +cards! pg3200.txt +cards, pg31100.txt, pg3200.txt +cards--it's pg3200.txt +cards. pg31100.txt, pg3200.txt +cards." pg31100.txt, pg3200.txt +cards.) pg3200.txt +cards; pg31100.txt +cards?' pg3200.txt +care pg31100.txt, pg3200.txt, pg100.txt +care! pg3200.txt, pg100.txt +care!" pg3200.txt +care, pg31100.txt, pg3200.txt, pg100.txt +care-free pg3200.txt +care-worn pg3200.txt +care. pg31100.txt, pg3200.txt, pg100.txt +care." pg31100.txt, pg3200.txt +care.' pg100.txt +care.'" pg3200.txt +care; pg31100.txt, pg3200.txt, pg100.txt +care? pg3200.txt, pg100.txt +care?" pg3200.txt +cared pg31100.txt, pg3200.txt +cared, pg3200.txt +cared. pg3200.txt +career pg31100.txt, pg3200.txt, pg100.txt +career, pg3200.txt, pg100.txt +career. pg3200.txt +career." pg3200.txt +career; pg3200.txt +career? pg100.txt +career?" pg3200.txt +careering pg3200.txt +careers pg3200.txt +careers, pg3200.txt +careers. pg100.txt +carefree pg3200.txt +careful pg31100.txt, pg3200.txt, pg100.txt +careful, pg3200.txt +careful--for pg3200.txt +careful. pg3200.txt, pg100.txt +careful." pg3200.txt +carefully pg31100.txt, pg3200.txt, pg100.txt +carefully, pg3200.txt, pg100.txt +carefully. pg3200.txt, pg100.txt +careless pg31100.txt, pg3200.txt +careless, pg31100.txt, pg3200.txt +careless--careless pg3200.txt +carelessly pg31100.txt, pg3200.txt +carelessly)--2. pg3200.txt +carelessly, pg31100.txt, pg3200.txt, pg100.txt +carelessly: pg3200.txt +carelessness pg3200.txt +cares pg31100.txt, pg3200.txt, pg100.txt +cares, pg31100.txt, pg3200.txt, pg100.txt +cares. pg3200.txt, pg100.txt +cares." pg31100.txt +cares? pg100.txt +caress pg3200.txt +caress. pg3200.txt +caressed pg31100.txt, pg3200.txt +caresses. 
pg3200.txt +caressing pg31100.txt, pg3200.txt +caret. pg100.txt +cargo pg3200.txt +cargo. pg3200.txt, pg100.txt +cargoes pg3200.txt +caricature pg31100.txt, pg3200.txt +caricatures. pg3200.txt +caricaturists, pg3200.txt +caring pg31100.txt, pg3200.txt +caring. pg3200.txt +carl pg3200.txt +carl, pg100.txt +carleton; pg3200.txt +carlisle pg100.txt +carlisle, pg100.txt +carlisle. pg100.txt +carlo. pg3200.txt +carloads pg3200.txt +carlsbad pg3200.txt +carlton's pg3200.txt +carlyle pg3200.txt +carlyle, pg3200.txt +carlyle. pg3200.txt +carm pg3200.txt +carman. pg3200.txt +carmel, pg3200.txt +carmine pg3200.txt +carnage pg3200.txt +carnage, pg3200.txt +carnage; pg3200.txt +carnegie pg3200.txt +carnegie's pg3200.txt +carnegie, pg3200.txt +carnegie: pg3200.txt +carolina pg3200.txt +carolina) pg3200.txt +carolina? pg3200.txt +caroline pg31100.txt +caroline's pg31100.txt +caroline." pg3200.txt +carolino pg3200.txt +carolino. pg3200.txt +carous'd. pg100.txt +carouse. pg3200.txt +caroused pg3200.txt, pg100.txt +carpenter pg3200.txt, pg100.txt +carpenter, pg3200.txt +carpenter. pg3200.txt, pg100.txt +carpenter? pg100.txt +carpentering pg3200.txt +carper. pg100.txt +carpet pg3200.txt, pg100.txt +carpet, pg31100.txt, pg3200.txt +carpet-bags. pg3200.txt +carpet-pattern pg3200.txt +carpet-work. pg31100.txt +carpet. pg3200.txt +carpet." pg3200.txt +carpeted pg3200.txt +carpeted; pg3200.txt +carpeting pg3200.txt +carpetless pg3200.txt +carpets pg100.txt +carpets" pg3200.txt +carpets, pg3200.txt +carpets. pg3200.txt +carpets; pg3200.txt +carriage pg31100.txt, pg3200.txt, pg100.txt +carriage! pg31100.txt +carriage!--excellent pg31100.txt +carriage, pg31100.txt, pg3200.txt, pg100.txt +carriage," pg31100.txt +carriage--we pg31100.txt +carriage--when pg3200.txt +carriage-road pg3200.txt +carriage. pg31100.txt, pg3200.txt, pg100.txt +carriage." pg31100.txt +carriage.--it pg31100.txt +carriage.] pg3200.txt +carriage; pg31100.txt, pg3200.txt +carriage?" pg31100.txt, pg3200.txt +carriage?--who pg31100.txt +carriage] pg3200.txt +carriages pg31100.txt, pg3200.txt +carriages, pg31100.txt, pg3200.txt, pg100.txt +carriages,--impatient pg31100.txt +carriages. pg3200.txt +carriages; pg31100.txt +carriages? pg100.txt +carried pg31100.txt, pg3200.txt, pg100.txt +carried, pg3200.txt, pg100.txt +carried- pg100.txt +carried--first pg3200.txt +carried. pg31100.txt, pg3200.txt, pg100.txt +carrier pg3200.txt +carrier, pg3200.txt +carrier. pg100.txt +carrier? pg100.txt +carrier]. pg100.txt +carriers; pg100.txt +carries pg3200.txt, pg100.txt +carries: pg100.txt +carries?" pg31100.txt +carrieth pg3200.txt +carrion pg3200.txt +carrion, pg3200.txt +carrion-heap. pg3200.txt +carrion. pg100.txt +carrot, pg3200.txt +carry pg31100.txt, pg3200.txt, pg100.txt +carry, pg31100.txt, pg3200.txt +carry. pg3200.txt, pg100.txt +carry." pg3200.txt +carry? pg100.txt +carrying pg31100.txt, pg3200.txt, pg100.txt +carrying?" pg3200.txt +cars pg3200.txt +cars, pg3200.txt, pg100.txt +cars,' pg3200.txt +cars--and pg3200.txt +cars. pg3200.txt, pg100.txt +cars." pg3200.txt +cars; pg100.txt +carson pg3200.txt +carson, pg3200.txt +carson--the pg3200.txt +carson. pg3200.txt +cart pg3200.txt +cart-driver. pg3200.txt +cart-loads pg3200.txt +cart." pg3200.txt +cart?" pg31100.txt +carted pg3200.txt +carter pg31100.txt +carter, pg31100.txt +carteret, pg31100.txt +carters. pg100.txt +carthage. pg100.txt +carthage? pg100.txt +cartridge pg3200.txt +carts pg3200.txt +carts, pg100.txt +carts. pg31100.txt +caruthers pg3200.txt +carve pg3200.txt +carve. 
pg100.txt +carved pg3200.txt +carved, pg3200.txt +carved. pg3200.txt +carves, pg100.txt +carving pg3200.txt +carving-knife pg3200.txt +carvings pg3200.txt +carvings. pg3200.txt +cary, pg3200.txt +cas'd pg100.txt +casanova pg3200.txt +casca, pg3200.txt, pg100.txt +casca. pg100.txt +casca? pg100.txt +cascades pg3200.txt +cascades, pg3200.txt +case pg31100.txt, pg3200.txt, pg100.txt +case! pg3200.txt, pg100.txt +case!' pg3200.txt +case) pg31100.txt +case, pg31100.txt, pg3200.txt, pg100.txt +case--" pg31100.txt +case----" pg3200.txt +case--and pg3200.txt +case--how pg3200.txt +case--not pg3200.txt +case--you pg31100.txt +case-knife." pg3200.txt +case-knives." pg3200.txt +case. pg31100.txt, pg3200.txt, pg100.txt +case." pg31100.txt, pg3200.txt +case."] pg3200.txt +case.' pg3200.txt +case.... pg3200.txt +case.] pg3200.txt +case: pg100.txt +case; pg31100.txt, pg3200.txt +case? pg3200.txt, pg100.txt +case?" pg31100.txt, pg3200.txt +cased pg3200.txt +casement pg100.txt +casement. pg100.txt +casements. pg3200.txt +casements; pg100.txt +cases pg31100.txt, pg3200.txt, pg100.txt +cases! pg100.txt +cases, pg3200.txt +cases. pg3200.txt +cases? pg3200.txt +cash pg3200.txt +cash, pg3200.txt +cash. pg3200.txt +cash." pg31100.txt +cashbox pg3200.txt +cashier'd. pg100.txt +cashier'd; pg100.txt +cashier, pg3200.txt +cashier; pg3200.txt +casino pg3200.txt +casino." pg3200.txt +cask pg3200.txt, pg100.txt +cask. pg3200.txt +casket pg100.txt +casket, pg100.txt +casket? pg3200.txt, pg100.txt +casket] pg100.txt +caskets pg100.txt +caskets. pg100.txt +casque pg100.txt +casques pg100.txt +cassandra pg100.txt +cassandra. pg100.txt +cassel pg31100.txt +cassi pg3200.txt +cassias pg3200.txt +cassibelan, pg100.txt +cassibelan. pg100.txt +cassino, pg31100.txt +cassio pg100.txt +cassio! pg100.txt +cassio, pg100.txt +cassio. pg100.txt +cassio; pg100.txt +cassio? pg100.txt +cassiopeia." pg31100.txt +cassius pg100.txt +cassius! pg100.txt +cassius, pg100.txt +cassius. pg100.txt +cassius; pg100.txt +cassius? pg100.txt +cast pg31100.txt, pg3200.txt, pg100.txt +cast- pg3200.txt +cast--but pg3200.txt +cast-away! pg100.txt +cast-iron pg3200.txt +cast. pg3200.txt +cast." pg3200.txt +castaway, pg100.txt +castaways, pg100.txt +caste pg3200.txt +caste, pg3200.txt +caste-brother pg3200.txt +caste-custom. pg3200.txt +caste. pg3200.txt +caster. pg3200.txt +casters, pg3200.txt +castes pg3200.txt +castigation. pg3200.txt +castile pg100.txt +casting pg3200.txt, pg100.txt +casting-vote pg3200.txt +castle pg31100.txt, pg3200.txt, pg100.txt +castle's pg3200.txt +castle, pg31100.txt, pg3200.txt, pg100.txt +castle- pg100.txt +castle--felix pg3200.txt +castle--or pg3200.txt +castle. pg3200.txt, pg100.txt +castle." pg3200.txt +castle; pg100.txt +castle? pg3200.txt, pg100.txt +castle?" pg31100.txt, pg3200.txt +castlemaine, pg3200.txt +castles pg31100.txt, pg3200.txt +castles, pg3200.txt +castles. pg3200.txt +castles: pg100.txt +castles; pg100.txt +casts pg3200.txt, pg100.txt +casual pg3200.txt +casually pg3200.txt, pg100.txt +casually, pg3200.txt +casualness pg3200.txt +casualties pg3200.txt +casualty. pg100.txt +cat pg3200.txt, pg100.txt +cat! pg3200.txt +cat's pg3200.txt +cat, pg3200.txt, pg100.txt +cat- pg3200.txt +cat--" pg3200.txt +cat--and pg3200.txt +cat-fishes, pg3200.txt +cat-nap pg3200.txt +cat-nap. pg3200.txt +cat. pg3200.txt, pg100.txt +cat." pg3200.txt +cat: pg3200.txt +cat; pg3200.txt, pg100.txt +cat? pg3200.txt +cat?" pg3200.txt +cat?' pg3200.txt +cat] pg3200.txt +cataclysm; pg3200.txt +catacombs pg3200.txt +catahoula. 
pg3200.txt +catalogue pg100.txt +catalogues pg3200.txt +catalogues. pg3200.txt +catapults pg3200.txt +cataract pg3200.txt +cataract. pg3200.txt +cataracts pg3200.txt +cataracts"--and pg3200.txt +catarrh, pg3200.txt +catastrophe pg3200.txt +catastrophe, pg31100.txt +catastrophe. pg3200.txt, pg100.txt +catastrophe? pg3200.txt +catastrophes pg3200.txt +catcalls pg3200.txt +catch pg31100.txt, pg3200.txt, pg100.txt +catch'd, pg100.txt +catch, pg3200.txt, pg100.txt +catch-line pg3200.txt +catch. pg3200.txt, pg100.txt +catch." pg31100.txt +catch: pg100.txt +catch; pg100.txt +catched pg3200.txt +catcher pg3200.txt +catches pg3200.txt +catches. pg100.txt +catching pg31100.txt, pg3200.txt, pg100.txt +catching, pg3200.txt +catching. pg100.txt +catching." pg31100.txt +catchword, pg31100.txt +catechism. pg100.txt +catechize pg100.txt +catechized pg3200.txt +cater-cousins- pg100.txt +caterpillar pg3200.txt +caterpillar--any pg3200.txt +caterpillar. pg3200.txt +caterpillars? pg100.txt +catesby pg100.txt +catesby! pg100.txt +catesby, pg100.txt +catesby. pg100.txt +catesby; pg100.txt +catfish--provisions pg3200.txt +catharina!" pg3200.txt +catharina. pg3200.txt +cathay pg3200.txt +cathedral pg3200.txt +cathedral, pg3200.txt +cathedral," pg3200.txt +cathedral. pg3200.txt +cathedral." pg3200.txt +cathedral; pg3200.txt +cathedrals. pg3200.txt +catherine pg31100.txt, pg3200.txt +catherine's pg31100.txt +catherine), pg31100.txt +catherine, pg31100.txt, pg3200.txt +catherine--as pg3200.txt +catherine--finding pg31100.txt +catherine. pg31100.txt, pg3200.txt +catherine." pg31100.txt +catherine; pg31100.txt +catherine?" pg3200.txt +catholic pg3200.txt +catholic, pg3200.txt +catholic. pg3200.txt +catholics pg3200.txt +catholics, pg3200.txt +cathy pg3200.txt +cathy. pg3200.txt +catling? pg100.txt +catlings pg100.txt +cato, pg100.txt +cats pg3200.txt +cats, pg3200.txt +cats--bliss pg3200.txt +cats--rich pg3200.txt +cats--very pg3200.txt +cats. pg3200.txt +cats." pg31100.txt, pg3200.txt +cats; pg3200.txt +cats?" pg3200.txt +catsup. pg3200.txt +cattle pg3200.txt +cattle, pg3200.txt, pg100.txt +cattle- pg3200.txt +cattle-friend, pg3200.txt +cattle-pens pg3200.txt +cattle-raiser pg3200.txt +cattle. pg3200.txt +cattle." pg3200.txt +cattle; pg3200.txt +catty pg3200.txt +caucasus. pg100.txt +caucasus? pg100.txt +cauchon pg3200.txt +cauchon's pg3200.txt +cauchon--and pg3200.txt +cauchon--though pg3200.txt +cauchon. pg3200.txt +caucusing pg3200.txt +caught pg31100.txt, pg3200.txt, pg100.txt +caught, pg3200.txt, pg100.txt +caught--and pg3200.txt +caught. pg31100.txt, pg3200.txt, pg100.txt +caught." pg3200.txt +caught.' pg100.txt +caught?" pg3200.txt +caulaincourt?" pg3200.txt +cauldron pg3200.txt +cauliflower pg3200.txt +caulker pg3200.txt +caus'd pg100.txt +cause pg31100.txt, pg3200.txt, pg100.txt +cause! pg100.txt +cause!!! pg3200.txt +cause, pg31100.txt, pg3200.txt, pg100.txt +cause- pg100.txt +cause--it pg3200.txt +cause. pg31100.txt, pg3200.txt, pg100.txt +cause." pg31100.txt, pg3200.txt +cause; pg31100.txt, pg100.txt +cause? pg100.txt +caused pg3200.txt +causeless pg31100.txt +causerie pg3200.txt +causes pg3200.txt, pg100.txt +causes, pg31100.txt, pg3200.txt +causes. pg3200.txt +causes: pg100.txt +causeway pg3200.txt +causing pg3200.txt +causing, pg3200.txt +cautelous, pg100.txt +caution pg31100.txt, pg3200.txt +caution, pg3200.txt +caution. pg3200.txt +caution: pg3200.txt +caution; pg31100.txt +cautions pg31100.txt, pg100.txt +cautious pg31100.txt, pg3200.txt +cautious, pg31100.txt, pg3200.txt +cautious. 
pg3200.txt +cautiously pg3200.txt +cautiously, pg3200.txt +cautiously--armed pg3200.txt +cautiously: pg3200.txt +cautiousness pg31100.txt +cavalcade pg3200.txt +cavalcade, pg3200.txt +cavalcades pg3200.txt +cavalieresses pg3200.txt +cavaliers pg3200.txt +cavalry pg3200.txt +cavalry's pg3200.txt +cavalry, pg3200.txt +cavalry. pg3200.txt +cave pg3200.txt, pg100.txt +cave!" pg3200.txt +cave, pg3200.txt, pg100.txt +cave- pg3200.txt, pg100.txt +cave--" pg3200.txt +cave-bear, pg3200.txt +cave-dwellers--merely pg3200.txt +cave-keeper, pg100.txt +cave. pg3200.txt, pg100.txt +cave; pg100.txt +cave? pg100.txt +cave?" pg3200.txt +cave] pg100.txt +caveated pg3200.txt +caved pg3200.txt +cavern pg3200.txt +cavern, pg3200.txt +cavernous pg3200.txt +caverns pg3200.txt +caves pg3200.txt +caves"; pg3200.txt +caves, pg3200.txt, pg100.txt +caves; pg3200.txt, pg100.txt +cavil pg3200.txt +cavilers. pg3200.txt +caving pg3200.txt +cavities pg3200.txt +cavity pg3200.txt +cawdor pg100.txt +cawdor! pg100.txt +cawdor. pg100.txt +cawdor? pg100.txt +cawdron. pg100.txt +cawnpore pg3200.txt +cawnpore, pg3200.txt +cawnpore. pg3200.txt +cayote pg3200.txt +cayote, pg3200.txt +cclvi- pg3200.txt +ce pg100.txt +ceas'd pg100.txt +ceas'd, pg100.txt +ceas'd; pg100.txt +cease pg31100.txt, pg3200.txt, pg100.txt +cease! pg100.txt +cease!' pg3200.txt +cease, pg3200.txt, pg100.txt +cease. pg3200.txt, pg100.txt +cease] pg100.txt +ceased pg31100.txt, pg3200.txt +ceased, pg31100.txt, pg3200.txt +ceased. pg31100.txt, pg3200.txt +ceased; pg3200.txt +ceaseless pg31100.txt, pg3200.txt +ceases pg3200.txt +ceases, pg31100.txt +ceases. pg100.txt +ceases] pg100.txt +ceasing pg31100.txt, pg3200.txt +ceasing, pg3200.txt +ceasing." pg31100.txt +ceasing; pg3200.txt +cecilia, pg31100.txt +cedar pg3200.txt, pg100.txt +cedar. pg100.txt +ceded pg3200.txt +ceilin' pg3200.txt +ceiling pg31100.txt, pg3200.txt +ceiling--nothing pg3200.txt +ceiling. pg3200.txt +ceilings pg3200.txt +celebrants. pg3200.txt +celebrate pg3200.txt, pg100.txt +celebrate, pg3200.txt, pg100.txt +celebrate." pg3200.txt +celebrated pg31100.txt, pg3200.txt +celebrated, pg3200.txt, pg100.txt +celebrated," pg3200.txt +celebrated--because pg3200.txt +celebrated. pg3200.txt, pg100.txt +celebrated; pg3200.txt +celebrates pg3200.txt, pg100.txt +celebrating pg3200.txt +celebrating, pg3200.txt +celebrating. pg3200.txt +celebration pg31100.txt +celebration, pg100.txt +celebration. pg3200.txt +celebrities pg3200.txt +celebrity pg3200.txt +celebrity!" pg3200.txt +celebrity, pg3200.txt +celebrity. pg3200.txt +celerity pg100.txt +celerity, pg3200.txt, pg100.txt +celery, pg3200.txt +celery. pg3200.txt +celia pg100.txt +cell pg3200.txt, pg100.txt +cell, pg3200.txt, pg100.txt +cell. pg3200.txt, pg100.txt +cell." pg3200.txt +cell; pg100.txt +cell? pg100.txt +cellar pg3200.txt +cellar, pg3200.txt +cellar. pg3200.txt +cellar; pg3200.txt +cellar?" pg3200.txt +cellarage. pg100.txt +celled pg3200.txt +cellini, pg3200.txt +cells pg3200.txt +celt pg3200.txt +cement pg3200.txt +cement, pg3200.txt, pg100.txt +cemeteries pg3200.txt +cemeteries.' pg3200.txt +cemetery pg3200.txt +cemetery! pg3200.txt +cemetery's pg3200.txt +cemetery-occupants pg3200.txt +cemetery. pg3200.txt +cemetery." pg3200.txt +cenci pg3200.txt +cenotaphs pg3200.txt +censer, pg100.txt +censor, pg100.txt +censur'd pg100.txt +censur'd? pg100.txt +censure pg31100.txt, pg3200.txt, pg100.txt +censure, pg31100.txt +censure. pg31100.txt, pg3200.txt, pg100.txt +censure; pg31100.txt +censured pg100.txt +censured. 
pg100.txt
+censurers,	pg100.txt
+censures	pg100.txt
+census:	pg3200.txt
[... several thousand further "+<token> <source-file list>" lines omitted: the job's inverted-index output continues alphabetically from "cent" through "complimented", one added diff line per token, each mapping a lowercased token to the comma-separated Project Gutenberg files it occurs in (pg100.txt: Shakespeare; pg3200.txt: Mark Twain; pg31100.txt: Jane Austen); the listing resumes after the sketch below ...]
pg31100.txt +compliments pg31100.txt, pg3200.txt +compliments, pg31100.txt, pg3200.txt +compliments. pg31100.txt, pg3200.txt +complots? pg100.txt +comply pg100.txt +comply, pg3200.txt +complying pg31100.txt, pg3200.txt +complying. pg31100.txt +complying; pg31100.txt +compos'd pg100.txt +compose pg31100.txt, pg3200.txt, pg100.txt +compose--god pg3200.txt +compose. pg31100.txt +composed pg31100.txt, pg3200.txt +composed, pg31100.txt +composed. pg31100.txt +composedly pg31100.txt, pg3200.txt +composedly: pg3200.txt +composer. pg31100.txt +composing, pg3200.txt +composing-gait. pg3200.txt +composing. pg3200.txt +composition pg31100.txt, pg3200.txt, pg100.txt +composition! pg100.txt +composition, pg3200.txt +composition. pg3200.txt, pg100.txt +composition; pg100.txt +compositions pg3200.txt +compositor." pg3200.txt +compositors, pg3200.txt +composure pg31100.txt, pg3200.txt, pg100.txt +composure, pg31100.txt +composure-- pg3200.txt +composure. pg31100.txt +composure: pg3200.txt +compound pg3200.txt, pg100.txt +compound, pg100.txt +compound. pg3200.txt, pg100.txt +compounded pg31100.txt +compounded, pg100.txt +compounded. pg100.txt +compounding; pg3200.txt +compounds pg100.txt +compounds, pg3200.txt, pg100.txt +comprehend pg31100.txt, pg3200.txt, pg100.txt +comprehend, pg3200.txt +comprehend. pg31100.txt, pg3200.txt, pg100.txt +comprehend; pg3200.txt +comprehended pg100.txt +comprehended, pg3200.txt +comprehended. pg3200.txt +comprehending pg31100.txt, pg3200.txt +comprehends pg31100.txt, pg3200.txt +comprehends. pg100.txt +comprehensible. pg3200.txt +comprehension pg31100.txt, pg3200.txt +comprehension, pg3200.txt +comprehension. pg31100.txt, pg3200.txt +comprehension." pg31100.txt, pg3200.txt +comprehensive pg3200.txt +comprehensive. pg3200.txt +comprehensively pg3200.txt +compressed pg3200.txt +compressed. pg3200.txt +compressing pg3200.txt +compression pg3200.txt +compris'd pg100.txt +comprise pg31100.txt +comprised pg31100.txt +comprises pg3200.txt +compromis'd pg100.txt +compromise pg3200.txt, pg100.txt +compromise, pg3200.txt, pg100.txt +compromise. pg31100.txt +compromised, pg3200.txt +compt, pg100.txt +compt. pg100.txt +comptroller pg3200.txt +comptrollers. pg100.txt +compts pg31100.txt +compulsion pg3200.txt +compulsion! pg100.txt +compulsion. pg3200.txt, pg100.txt +compulsion; pg100.txt +compulsions pg3200.txt +compulsory pg3200.txt +compulsory; pg3200.txt +compunction pg3200.txt +computation. pg3200.txt +computers pg31100.txt, pg3200.txt, pg100.txt +comrade pg3200.txt +comrade. pg3200.txt +comrade: pg3200.txt +comraded pg3200.txt +comradeless pg3200.txt +comrades pg3200.txt +comrades! pg3200.txt +comrades, pg3200.txt +comrades-- pg3200.txt +comrades--for pg3200.txt +comrades. pg3200.txt +comrades: pg3200.txt +comrades; pg3200.txt +comradeship pg3200.txt +comstock pg3200.txt +comstock. pg3200.txt +comtemplating pg3200.txt +con pg3200.txt, pg100.txt +con-- pg3200.txt +conceal pg31100.txt, pg3200.txt, pg100.txt +conceal!" pg31100.txt +conceal'd pg100.txt +conceal'd. pg100.txt +conceal, pg100.txt +conceal." pg31100.txt +conceal; pg100.txt +concealed pg31100.txt, pg3200.txt +concealed. pg31100.txt, pg3200.txt +concealed?" pg31100.txt +concealing pg31100.txt, pg3200.txt +concealment pg31100.txt, pg3200.txt, pg100.txt +concealment, pg31100.txt +concealment. pg3200.txt, pg100.txt +concealment." pg31100.txt +concede pg3200.txt +concede. 
pg3200.txt +conceded pg3200.txt +conceding pg3200.txt +conceit pg31100.txt, pg3200.txt, pg100.txt +conceit, pg31100.txt, pg3200.txt, pg100.txt +conceit- pg100.txt +conceit. pg31100.txt, pg100.txt +conceit; pg100.txt +conceited pg31100.txt, pg3200.txt +conceited. pg31100.txt +conceited? pg100.txt +conceitless, pg100.txt +conceits, pg100.txt +conceiv'd, pg100.txt +conceivable pg3200.txt +conceive pg31100.txt, pg3200.txt, pg100.txt +conceive, pg100.txt +conceive. pg31100.txt, pg100.txt +conceive; pg100.txt +conceive? pg100.txt +conceived pg31100.txt, pg3200.txt +conceived, pg100.txt +conceived. pg31100.txt, pg3200.txt +conceiving, pg31100.txt +concentrated pg3200.txt +concentrates pg3200.txt +concentration pg3200.txt +concentration, pg3200.txt +conception pg31100.txt, pg3200.txt, pg100.txt +conception!" pg3200.txt +conception, pg3200.txt +conception. pg100.txt +conception; pg31100.txt +concern pg31100.txt, pg3200.txt, pg100.txt +concern!--well, pg31100.txt +concern". pg31100.txt +concern, pg31100.txt, pg3200.txt +concern. pg31100.txt, pg3200.txt +concern; pg31100.txt +concern?' pg3200.txt +concerned pg31100.txt, pg3200.txt +concerned! pg31100.txt, pg3200.txt +concerned, pg3200.txt +concerned--and pg3200.txt +concerned--i pg31100.txt +concerned. pg31100.txt, pg3200.txt +concerned." pg31100.txt, pg3200.txt +concerned.) pg3200.txt +concerned; pg31100.txt, pg3200.txt +concerneth pg100.txt +concerning pg31100.txt, pg3200.txt, pg100.txt +concerns pg31100.txt, pg3200.txt, pg100.txt +concerns, pg31100.txt, pg100.txt +concerns. pg31100.txt, pg3200.txt, pg100.txt +concerns." pg31100.txt +concerns; pg100.txt +concert pg31100.txt, pg3200.txt +concert) pg3200.txt +concert, pg31100.txt, pg3200.txt +concert-room; pg3200.txt +concert-vocalist. pg3200.txt +concert. pg3200.txt +concert." pg31100.txt +concert: pg3200.txt +concert?" pg31100.txt +concerto-- pg31100.txt +concerts. pg3200.txt +concession pg31100.txt, pg3200.txt +concessions pg31100.txt, pg3200.txt +conchology. pg3200.txt +conchology?" pg3200.txt +conchyliaceous pg3200.txt +conciliator, pg3200.txt +concise pg31100.txt, pg3200.txt +concise, pg3200.txt +concise. pg31100.txt +conclave pg3200.txt +conclaves pg3200.txt +conclude pg31100.txt, pg3200.txt, pg100.txt +conclude, pg100.txt +conclude; pg31100.txt +conclude?" pg31100.txt +concluded pg31100.txt, pg3200.txt, pg100.txt +concluded, pg31100.txt, pg3200.txt, pg100.txt +concluded. pg31100.txt, pg3200.txt, pg100.txt +concluded? pg100.txt +concludes pg100.txt +concludes, pg3200.txt +concludes. pg100.txt +concludes: pg100.txt +concluding pg31100.txt, pg3200.txt +concluding, pg3200.txt +concluding: pg3200.txt +conclusion pg31100.txt, pg3200.txt, pg100.txt +conclusion' pg3200.txt +conclusion, pg3200.txt, pg100.txt +conclusion--thus: pg3200.txt +conclusion. pg31100.txt, pg3200.txt, pg100.txt +conclusion: pg3200.txt +conclusion; pg3200.txt +conclusions pg3200.txt +conclusions. pg100.txt +conclusions?' pg3200.txt +conclusive? pg3200.txt +conclusively pg3200.txt +conclusively, pg3200.txt +conclusiveness pg3200.txt +conclusiveness, pg3200.txt +concocted pg3200.txt +concolinel] pg100.txt +concord pg3200.txt +concord, pg3200.txt +concourse pg3200.txt +concrete, pg3200.txt +concubine pg3200.txt +concubine. pg100.txt +concupy. pg100.txt +concurrence pg31100.txt +concurrence. pg31100.txt +concurrence." pg31100.txt +concussion pg3200.txt +concussions pg3200.txt +condemn pg31100.txt, pg3200.txt, pg100.txt +condemn'd pg100.txt +condemn'd, pg100.txt +condemn'd. 
pg100.txt +condemn'd: pg100.txt +condemn'd; pg100.txt +condemn, pg3200.txt +condemnation. pg3200.txt +condemned pg3200.txt, pg100.txt +condemned, pg3200.txt +condemned." pg3200.txt +condemned; pg3200.txt +condemns pg3200.txt +condemns. pg100.txt +condensations pg3200.txt +condense pg3200.txt +condensed pg3200.txt +condescend pg31100.txt, pg3200.txt +condescended pg3200.txt +condescending pg3200.txt +condescends pg31100.txt +condescension pg3200.txt +condescension, pg31100.txt +condescension. pg3200.txt +condition pg31100.txt, pg3200.txt, pg100.txt +condition! pg100.txt +condition" pg31100.txt +condition, pg31100.txt, pg3200.txt, pg100.txt +condition,) pg3200.txt +condition- pg100.txt +condition--cash, pg3200.txt +condition. pg31100.txt, pg3200.txt, pg100.txt +condition." pg31100.txt, pg3200.txt +condition: pg100.txt +condition; pg3200.txt, pg100.txt +condition? pg100.txt +condition?" pg3200.txt +condition_] pg31100.txt +conditional pg31100.txt +conditioned pg31100.txt +conditions pg31100.txt, pg3200.txt, pg100.txt +conditions, pg3200.txt +conditions--" pg3200.txt +conditions--an pg3200.txt +conditions. pg3200.txt, pg100.txt +conditions; pg3200.txt, pg100.txt +conditions?" pg3200.txt +condole pg3200.txt +condolence pg31100.txt +condolence, pg3200.txt +condolences pg3200.txt +condoles pg3200.txt +condoling. pg100.txt +condoning pg3200.txt +conduce pg100.txt +conducive pg3200.txt +conduct pg31100.txt, pg3200.txt, pg100.txt +conduct!" pg3200.txt +conduct, pg31100.txt, pg3200.txt, pg100.txt +conduct. pg31100.txt, pg3200.txt, pg100.txt +conduct." pg31100.txt, pg3200.txt +conduct.--but pg31100.txt +conduct; pg31100.txt +conduct? pg3200.txt +conduct?" pg31100.txt +conducted pg31100.txt, pg3200.txt +conducted, pg3200.txt +conducted. pg100.txt +conductor pg3200.txt +conductor's pg3200.txt +conductor. pg3200.txt +conductor?" pg3200.txt +conductor?' pg3200.txt +conductors pg3200.txt +conductors, pg3200.txt +conduit pg31100.txt, pg3200.txt +cone pg3200.txt +cone, pg3200.txt +conected; pg100.txt +cones pg3200.txt +coney. pg3200.txt +conf--" pg3200.txt +confection pg100.txt +confectionary; pg100.txt +confederacy pg100.txt +confederacy! pg100.txt +confederacy, pg100.txt +confederate pg3200.txt +confederate, pg3200.txt +confederates pg3200.txt, pg100.txt +confederates, pg100.txt +confederates. pg100.txt +confer pg3200.txt, pg100.txt +confer. pg100.txt +conference pg31100.txt, pg3200.txt, pg100.txt +conference, pg31100.txt, pg3200.txt, pg100.txt +conference. pg3200.txt, pg100.txt +conference? pg100.txt +conferr'd pg100.txt +conferred pg3200.txt +conferring pg31100.txt, pg3200.txt +confers pg3200.txt +confers, pg31100.txt +confess pg31100.txt, pg3200.txt, pg100.txt +confess!" pg3200.txt +confess'd pg100.txt +confess'd, pg100.txt +confess'd. pg100.txt +confess'd? pg100.txt +confess't. pg100.txt +confess, pg31100.txt, pg100.txt +confess- pg100.txt +confess--" pg3200.txt +confess. pg3200.txt, pg100.txt +confess." pg3200.txt +confess; pg31100.txt, pg3200.txt, pg100.txt +confess?" pg3200.txt +confessed pg31100.txt, pg3200.txt +confessed, pg3200.txt +confessed--though pg3200.txt +confessed. pg3200.txt +confessed." pg3200.txt +confessed?" pg3200.txt +confessedly pg3200.txt +confesses pg3200.txt, pg100.txt +confessing pg31100.txt, pg3200.txt, pg100.txt +confession pg31100.txt, pg3200.txt, pg100.txt +confession!--and pg3200.txt +confession, pg31100.txt, pg3200.txt +confession. pg3200.txt, pg100.txt +confession: pg3200.txt +confession:--i pg3200.txt +confession; pg100.txt +confession? pg3200.txt +confession?" 
pg3200.txt +confessional pg3200.txt +confessions pg3200.txt +confessions, pg100.txt +confessions. pg3200.txt +confessor pg100.txt +confessor, pg100.txt +confessor. pg100.txt +confidant?" pg31100.txt +confidante pg31100.txt +confidante, pg31100.txt +confidante. pg31100.txt +confidante." pg31100.txt +confidante?" pg31100.txt +confidantes, pg31100.txt +confide pg31100.txt +confided pg3200.txt +confidence pg31100.txt, pg3200.txt, pg100.txt +confidence! pg31100.txt +confidence, pg31100.txt, pg3200.txt +confidence--" pg3200.txt +confidence--no pg3200.txt +confidence. pg31100.txt, pg3200.txt, pg100.txt +confidence." pg3200.txt +confidence: pg3200.txt +confidence; pg31100.txt, pg3200.txt, pg100.txt +confidence?" pg31100.txt +confidences pg31100.txt +confidences--meaning pg3200.txt +confidences. pg3200.txt +confidences; pg3200.txt +confident pg31100.txt, pg3200.txt, pg100.txt +confident" pg3200.txt +confident, pg100.txt +confident--and, pg3200.txt +confident. pg3200.txt, pg100.txt +confidential pg31100.txt, pg3200.txt +confidential" pg3200.txt +confidential. pg3200.txt +confidentially pg3200.txt +confidentially, pg31100.txt +confidentially: pg3200.txt +confidently pg3200.txt +confidently, pg3200.txt +confiding pg3200.txt +confiding, pg31100.txt +confiding--everything pg31100.txt +confin'd pg100.txt +confin'd, pg100.txt +confin'd- pg100.txt +confin'd. pg100.txt +confine pg3200.txt, pg100.txt +confine, pg100.txt +confine. pg100.txt +confine; pg100.txt +confined pg31100.txt, pg3200.txt +confined! pg31100.txt +confined, pg100.txt +confined. pg100.txt +confined?" pg3200.txt +confinement pg31100.txt, pg3200.txt +confinement, pg31100.txt, pg3200.txt +confinement. pg3200.txt +confiners pg100.txt +confines pg3200.txt, pg100.txt +confines; pg3200.txt +confining pg31100.txt +confirm pg31100.txt, pg3200.txt, pg100.txt +confirm'd pg100.txt +confirm, pg100.txt +confirmation pg31100.txt, pg3200.txt, pg100.txt +confirmation- pg100.txt +confirmation. pg31100.txt, pg3200.txt, pg100.txt +confirmation; pg100.txt +confirmed pg31100.txt, pg3200.txt +confirmed, pg31100.txt +confirmed. pg3200.txt +confirmer pg100.txt +confirming pg31100.txt, pg3200.txt +confirms pg31100.txt, pg3200.txt, pg100.txt +confirms; pg100.txt +confiscate pg100.txt +confiscate, pg3200.txt +confiscate. pg100.txt +confiscated. pg3200.txt, pg100.txt +confiscation pg3200.txt +conflagration pg3200.txt +conflagration, pg3200.txt +conflagrations, pg3200.txt +conflict pg3200.txt +conflict, pg100.txt +conflict. pg3200.txt +conflicting pg3200.txt +conflicts pg3200.txt +conflicts, pg100.txt +conflicts. pg3200.txt +conform pg3200.txt +conformable, pg100.txt +conformity. pg3200.txt +confound pg3200.txt, pg100.txt +confound! pg100.txt +confound. pg100.txt +confound; pg100.txt +confounded pg31100.txt, pg100.txt +confounded. pg31100.txt +confounds pg100.txt +confounds, pg100.txt +confront pg3200.txt +confronted pg3200.txt +confronts pg3200.txt +confucius pg3200.txt +confus'd pg100.txt +confus'd, pg100.txt +confuse pg31100.txt, pg3200.txt +confused pg31100.txt, pg3200.txt, pg100.txt +confused, pg31100.txt, pg3200.txt +confused--"i pg31100.txt +confused--this pg3200.txt +confused. pg3200.txt +confused: pg100.txt +confused; pg3200.txt +confusedly pg3200.txt, pg100.txt +confusing pg3200.txt +confusing. pg3200.txt +confusion pg31100.txt, pg3200.txt, pg100.txt +confusion! pg100.txt +confusion!" pg31100.txt +confusion, pg31100.txt, pg3200.txt, pg100.txt +confusion. pg31100.txt, pg3200.txt, pg100.txt +confusion; pg31100.txt, pg100.txt +confusion?" pg3200.txt +congealed." 
pg3200.txt +congenial pg31100.txt, pg3200.txt +congest, pg100.txt +congied pg100.txt +congratulate pg31100.txt, pg3200.txt +congratulate, pg3200.txt +congratulated pg3200.txt +congratulated. pg31100.txt +congratulating pg31100.txt +congratulation pg31100.txt +congratulations pg31100.txt, pg3200.txt +congratulations, pg3200.txt +congratulations. pg31100.txt +congratulations.--harriet pg31100.txt +congratulatory pg3200.txt +congregate pg3200.txt +congregate, pg100.txt +congregation pg3200.txt +congregation. pg3200.txt +congregational pg3200.txt +congregationalists, pg3200.txt +congregationalists? pg3200.txt +congress pg3200.txt +congress, pg3200.txt +congress-water?" pg3200.txt +congress. pg3200.txt +congress." pg3200.txt +congress; pg3200.txt +congresses pg3200.txt +congressional pg3200.txt +congressman pg3200.txt +congressman, pg3200.txt +congressman. pg3200.txt +congressmen. pg3200.txt +congressmen? pg3200.txt +congruous pg3200.txt +congruous. pg3200.txt +conies pg100.txt +conjecture pg31100.txt, pg3200.txt, pg100.txt +conjecture, pg100.txt +conjecture. pg31100.txt, pg3200.txt +conjecture." pg31100.txt +conjectured pg31100.txt +conjectured, pg31100.txt +conjectures pg3200.txt +conjectures, pg31100.txt, pg100.txt +conjectures. pg31100.txt +conjectures; pg100.txt +conjecturing pg31100.txt +conjecturing. pg3200.txt +conjoin'd pg100.txt +conjugal pg31100.txt +conjunct pg100.txt +conjunction pg3200.txt, pg100.txt +conjunction! pg100.txt +conjunction, pg100.txt +conjunction. pg100.txt +conjunction; pg3200.txt +conjunctive pg100.txt +conjuration pg100.txt +conjure pg31100.txt, pg3200.txt +conjured pg3200.txt, pg100.txt +conjurer pg100.txt +conjurer, pg100.txt +conjurer. pg100.txt +conjurer: pg100.txt +conjurer? pg100.txt +conjurers, pg100.txt +conn pg3200.txt +conn, pg3200.txt +conn. pg3200.txt +conn., pg3200.txt +conn.: pg3200.txt +conn: pg3200.txt +connect pg31100.txt, pg3200.txt +connect, pg3200.txt +connect. pg3200.txt +connected pg31100.txt, pg3200.txt +connected, pg31100.txt, pg100.txt +connected. pg31100.txt +connected." pg31100.txt +connecticut pg3200.txt +connecticut). pg3200.txt +connecticut, pg3200.txt +connecticut,' pg3200.txt +connecticut. pg3200.txt +connecting pg3200.txt +connection pg31100.txt, pg3200.txt +connection, pg31100.txt, pg3200.txt +connection. pg31100.txt, pg3200.txt +connections pg31100.txt, pg3200.txt +connections, pg31100.txt +connections?" pg31100.txt +connects pg3200.txt +conned pg3200.txt +connexion pg31100.txt +connexion, pg31100.txt +connexion. pg31100.txt +connexions pg31100.txt +connexions, pg31100.txt +connive pg100.txt +connoisseurs pg3200.txt +connoisseurship, pg31100.txt +conquer pg31100.txt, pg3200.txt, pg100.txt +conquer'd pg100.txt +conquer'd! pg100.txt +conquer, pg100.txt +conquer. pg3200.txt +conquer; pg31100.txt, pg100.txt +conquer? pg3200.txt +conquered pg31100.txt, pg3200.txt +conquered! pg3200.txt +conquered, pg3200.txt +conquered--curiosities pg3200.txt +conquered. pg3200.txt, pg100.txt +conquered." pg3200.txt +conquered; pg3200.txt, pg100.txt +conquered? pg100.txt +conquering pg3200.txt +conqueror pg100.txt +conqueror! pg100.txt +conqueror, pg3200.txt, pg100.txt +conqueror. pg100.txt +conqueror; pg100.txt +conquerors pg3200.txt, pg100.txt +conquerors, pg100.txt +conquerors- pg100.txt +conquerors; pg100.txt +conquest pg31100.txt, pg3200.txt, pg100.txt +conquest, pg100.txt +conquest,' pg31100.txt +conquest. pg31100.txt, pg3200.txt, pg100.txt +conquest." pg31100.txt +conquest; pg100.txt +conquests pg3200.txt +conquests." 
pg31100.txt +conrad pg3200.txt +conrad!'" pg3200.txt +conrad's pg3200.txt +conrad, pg3200.txt +conrade! pg100.txt +conrade. pg100.txt +consanguinity, pg100.txt +consarns pg3200.txt +conscience pg31100.txt, pg3200.txt, pg100.txt +conscience! pg100.txt +conscience!" pg3200.txt +conscience, pg31100.txt, pg3200.txt, pg100.txt +conscience- pg100.txt +conscience--"i pg3200.txt +conscience--a pg3200.txt +conscience--and pg3200.txt +conscience--that pg3200.txt +conscience-free pg3200.txt +conscience. pg31100.txt, pg3200.txt, pg100.txt +conscience." pg31100.txt, pg3200.txt +conscience; pg3200.txt +conscience? pg3200.txt, pg100.txt +conscience?" pg3200.txt +conscienceless pg3200.txt +consciences pg3200.txt, pg100.txt +consciences. pg3200.txt +conscientious pg31100.txt, pg3200.txt +conscientious. pg3200.txt +conscientious; pg3200.txt +conscientiously, pg3200.txt +conscientiousness pg3200.txt +conscious pg31100.txt, pg3200.txt +consciousness pg31100.txt, pg3200.txt +consciousness, pg31100.txt, pg3200.txt +consciousness. pg31100.txt +consciousness." pg31100.txt +consciousness: pg3200.txt +consecrate pg100.txt +consecrate, pg100.txt +consecrate. pg100.txt +consecrated pg3200.txt +consecutive pg3200.txt +consent pg31100.txt, pg3200.txt, pg100.txt +consent, pg31100.txt, pg3200.txt, pg100.txt +consent," pg31100.txt +consent- pg100.txt +consent. pg31100.txt, pg3200.txt, pg100.txt +consent.' pg3200.txt +consent: pg100.txt +consent; pg31100.txt, pg100.txt +consent? pg100.txt +consent?" pg31100.txt +consented pg31100.txt, pg3200.txt, pg100.txt +consented, pg31100.txt, pg3200.txt +consented. pg31100.txt, pg3200.txt, pg100.txt +consenting pg31100.txt +consents pg3200.txt +consents! pg100.txt +consents--offers pg3200.txt +consents. pg100.txt +consequen- pg100.txt +consequence pg31100.txt, pg3200.txt, pg100.txt +consequence! pg100.txt +consequence!" pg3200.txt +consequence, pg31100.txt, pg3200.txt, pg100.txt +consequence- pg100.txt +consequence--so pg31100.txt +consequence. pg31100.txt, pg3200.txt +consequence." pg31100.txt, pg3200.txt +consequence: pg3200.txt, pg100.txt +consequence; pg31100.txt, pg3200.txt, pg100.txt +consequence? pg3200.txt +consequence?" pg31100.txt, pg3200.txt +consequences pg31100.txt, pg3200.txt +consequences!" pg3200.txt +consequences. pg31100.txt, pg3200.txt +consequences." pg31100.txt, pg3200.txt +consequences; pg3200.txt +consequent pg31100.txt, pg3200.txt +consequential pg3200.txt +consequently pg31100.txt, pg3200.txt +consequently, pg31100.txt, pg3200.txt +conservatories pg3200.txt +conserves, pg100.txt +conserves? pg100.txt +consider pg31100.txt, pg3200.txt, pg100.txt +consider! pg3200.txt +consider'd, pg100.txt +consider, pg31100.txt, pg3200.txt, pg100.txt +consider. pg100.txt +consider; pg100.txt +considerable pg31100.txt, pg3200.txt, pg100.txt +considerable, pg3200.txt +considerable. pg31100.txt, pg3200.txt +considerable; pg3200.txt +considerably pg31100.txt, pg3200.txt +considerably, pg3200.txt +considerably." pg31100.txt +considerate pg3200.txt +considerate, pg3200.txt +consideration pg31100.txt, pg3200.txt +consideration!" pg3200.txt +consideration). pg3200.txt +consideration, pg31100.txt, pg3200.txt, pg100.txt +consideration. pg31100.txt, pg3200.txt +consideration." pg31100.txt +consideration; pg31100.txt +considerations pg3200.txt +considerations; pg31100.txt +considered pg31100.txt, pg3200.txt +considered, pg31100.txt, pg3200.txt, pg100.txt +considered--but, pg31100.txt +considered. pg3200.txt, pg100.txt +considering pg31100.txt, pg3200.txt +considering. 
pg3200.txt, pg100.txt +considers pg31100.txt, pg3200.txt +consigned pg31100.txt, pg3200.txt +consist pg3200.txt, pg100.txt +consisted pg31100.txt, pg3200.txt +consisted. pg3200.txt +consistency pg31100.txt +consistent pg31100.txt, pg3200.txt +consistent, pg3200.txt +consistent." pg31100.txt +consistent; pg31100.txt +consistently pg3200.txt +consisting pg3200.txt +consisting, pg100.txt +consistory, pg100.txt +consists pg3200.txt, pg100.txt +consolation pg31100.txt, pg3200.txt +consolation, pg31100.txt +consolation- pg100.txt +consolation. pg31100.txt, pg3200.txt +consolation." pg31100.txt, pg3200.txt +consolation; pg31100.txt +consolation? pg31100.txt +console pg31100.txt, pg3200.txt +consoled pg31100.txt +consoling pg31100.txt +consoling; pg31100.txt +consonance pg31100.txt +consonant? pg100.txt +consonants pg3200.txt +consort pg100.txt +consort! pg100.txt +consort. pg100.txt +consort? pg100.txt +consorting pg3200.txt +conspicuous pg3200.txt +conspicuous, pg3200.txt +conspicuous. pg3200.txt +conspicuous." pg3200.txt +conspicuous; pg3200.txt +conspicuous?" pg3200.txt +conspicuously pg3200.txt +conspicuousness pg3200.txt +conspicuousness, pg3200.txt +conspicuousness. pg3200.txt +conspicuousness; pg3200.txt +conspir'd, pg100.txt +conspiracies pg3200.txt +conspiracies. pg3200.txt +conspiracy pg3200.txt, pg100.txt +conspiracy! pg100.txt +conspiracy, pg100.txt +conspiracy: pg100.txt +conspiracy; pg100.txt +conspiracy? pg100.txt +conspirator, pg100.txt +conspirator. pg100.txt +conspirators pg3200.txt, pg100.txt +conspirators-in-chief--" pg3200.txt +conspirators. pg100.txt +conspirators; pg100.txt +conspire pg3200.txt +conspire, pg100.txt +conspire? pg100.txt +conspired pg3200.txt +constable pg3200.txt, pg100.txt +constable! pg100.txt +constable, pg31100.txt, pg3200.txt, pg100.txt +constable-- pg100.txt +constable. pg100.txt +constable; pg3200.txt, pg100.txt +constable? pg100.txt +constables pg3200.txt +constance pg100.txt +constance! pg100.txt +constance; pg100.txt +constancies pg100.txt +constancy pg31100.txt, pg100.txt +constancy, pg31100.txt, pg3200.txt, pg100.txt +constancy. pg100.txt +constancy." pg31100.txt +constancy; pg100.txt +constant pg31100.txt, pg3200.txt, pg100.txt +constant, pg31100.txt, pg100.txt +constant. pg100.txt +constant." pg31100.txt +constant: pg100.txt +constant; pg3200.txt +constantine, pg100.txt +constantinople pg3200.txt +constantinople, pg3200.txt +constantinople--fantastic pg3200.txt +constantinople. pg3200.txt +constantly pg31100.txt, pg3200.txt, pg100.txt +constantly, pg3200.txt +constantly. pg3200.txt, pg100.txt +constantly." pg31100.txt +constellation pg3200.txt +constellation. pg3200.txt +constellation?" pg3200.txt +constellations pg3200.txt +consternation pg31100.txt, pg3200.txt +consternation, pg31100.txt, pg3200.txt +consternation. pg3200.txt +constituents pg3200.txt +constitute pg3200.txt +constituted pg31100.txt, pg3200.txt +constituted, pg3200.txt +constituting pg3200.txt +constitution pg31100.txt, pg3200.txt, pg100.txt +constitution! pg31100.txt +constitution, pg3200.txt +constitution. pg3200.txt +constitutional pg3200.txt +constrain pg31100.txt +constrain'd pg100.txt +constrain'd, pg100.txt +constrained pg3200.txt +constrained; pg3200.txt +constraineth pg100.txt +constrains pg100.txt +constraint pg3200.txt +constraint, pg100.txt +constraint. 
pg3200.txt +constraint; pg100.txt +constricted pg3200.txt +construct pg3200.txt +constructed pg3200.txt +constructed, pg3200.txt +constructed; pg3200.txt +constructing pg3200.txt +construction pg31100.txt, pg3200.txt, pg100.txt +construction, pg3200.txt +construction--on pg3200.txt +construction-work, pg3200.txt +construction. pg3200.txt, pg100.txt +constructions pg3200.txt +constructive pg3200.txt +construe pg100.txt +construed pg3200.txt +construing pg31100.txt +consul pg3200.txt, pg100.txt +consul! pg100.txt +consul, pg3200.txt, pg100.txt +consul-general pg3200.txt +consul. pg3200.txt, pg100.txt +consul? pg100.txt +consulate pg3200.txt +consuls pg3200.txt +consuls, pg3200.txt +consulship? pg100.txt +consulships? pg100.txt +consult pg31100.txt, pg3200.txt +consult. pg3200.txt, pg100.txt +consultation pg31100.txt, pg3200.txt +consultation, pg31100.txt, pg3200.txt +consultation. pg3200.txt +consultations pg3200.txt +consultations, pg31100.txt +consulted pg31100.txt, pg3200.txt +consulted, pg31100.txt +consulting pg31100.txt +consum'd pg100.txt +consum'd. pg100.txt +consume pg3200.txt +consumed pg31100.txt, pg3200.txt +consumed; pg31100.txt, pg3200.txt +consumedly." pg3200.txt +consuming pg3200.txt +consuming, pg3200.txt +consummate, pg100.txt +consummated pg3200.txt +consummating pg3200.txt +consummation pg3200.txt, pg100.txt +consummation. pg3200.txt +consummations pg3200.txt +consumption pg3200.txt, pg100.txt +consumption, pg3200.txt +consumption. pg3200.txt, pg100.txt +consumptive pg3200.txt +consumptives pg3200.txt +contact pg31100.txt, pg3200.txt, pg100.txt +contact--something pg3200.txt +contagion. pg100.txt +contain pg31100.txt, pg3200.txt, pg100.txt +contain'd, pg100.txt +contain, pg3200.txt, pg100.txt +contain? pg100.txt +contained pg31100.txt, pg3200.txt +contained, pg3200.txt +container, pg3200.txt +container. pg3200.txt +containing pg31100.txt, pg3200.txt, pg100.txt +contains pg3200.txt +contains, pg100.txt +contains. pg100.txt +contains." pg3200.txt +contaminate! pg100.txt +contaminated pg3200.txt +contaminated. pg100.txt +conte pg3200.txt +conte, pg3200.txt +contemn'd, pg100.txt +contemplate pg3200.txt +contemplate. pg3200.txt +contemplate; pg100.txt +contemplated pg31100.txt, pg3200.txt +contemplates, pg3200.txt +contemplating pg3200.txt +contemplation pg31100.txt, pg3200.txt, pg100.txt +contemplation, pg100.txt +contemplation--as pg31100.txt +contemplation. pg31100.txt, pg100.txt +contemplation; pg100.txt +contemplation? pg100.txt +contemplations pg3200.txt +contemplative pg3200.txt +contemplative; pg100.txt +contemporaneous pg3200.txt +contemporaries pg3200.txt +contempt pg31100.txt, pg3200.txt, pg100.txt +contempt!" pg3200.txt +contempt, pg31100.txt, pg3200.txt, pg100.txt +contempt. pg31100.txt, pg3200.txt, pg100.txt +contempt.--three-fifths pg3200.txt +contempt; pg3200.txt, pg100.txt +contempt? pg100.txt +contemptible pg31100.txt, pg3200.txt +contemptible. pg3200.txt +contemptuous pg31100.txt +contemptuous? pg3200.txt +contemptuousness pg3200.txt +contend pg31100.txt, pg3200.txt, pg100.txt +contend, pg100.txt +contend. pg100.txt +contended pg31100.txt, pg3200.txt +contended, pg3200.txt +contending pg3200.txt +content pg31100.txt, pg3200.txt, pg100.txt +content! pg100.txt +content, pg3200.txt, pg100.txt +content--perfectly pg3200.txt +content--why, pg3200.txt +content-a? pg100.txt +content. pg3200.txt, pg100.txt +content." pg3200.txt +content; pg3200.txt, pg100.txt +content? pg100.txt +contenta. 
pg100.txt +contented pg31100.txt, pg3200.txt, pg100.txt +contented, pg31100.txt, pg3200.txt, pg100.txt +contented--she pg31100.txt +contented. pg31100.txt, pg3200.txt, pg100.txt +contented." pg31100.txt +contented;" pg31100.txt +contentedly pg31100.txt, pg3200.txt +contentedly, pg3200.txt +contentedly-- pg3200.txt +contenting pg3200.txt +contention pg31100.txt, pg100.txt +contention. pg100.txt +contentless, pg100.txt +contentment pg31100.txt, pg3200.txt +contentment, pg3200.txt +contentment. pg31100.txt, pg3200.txt +contento.' pg100.txt +contents pg31100.txt, pg3200.txt, pg100.txt +contents, pg3200.txt, pg100.txt +contents-- pg31100.txt +contents. pg31100.txt, pg3200.txt, pg100.txt +contents." pg31100.txt +contents: pg31100.txt, pg3200.txt +contents; pg100.txt +contents?" pg31100.txt +contest pg3200.txt, pg100.txt +contest, pg31100.txt +contest. pg3200.txt +contestants pg3200.txt +contestation pg100.txt +contested; pg3200.txt +continent pg3200.txt, pg100.txt +continent, pg3200.txt, pg100.txt +continent. pg3200.txt +continent; pg3200.txt +continental pg3200.txt +continental. pg3200.txt +continents pg3200.txt +continents. pg100.txt +continual pg31100.txt, pg3200.txt +continually pg31100.txt, pg3200.txt +continually. pg3200.txt +continuance pg31100.txt, pg3200.txt, pg100.txt +continuance, pg31100.txt +continuance. pg31100.txt, pg100.txt +continuance; pg100.txt +continuantly pg100.txt +continuate pg100.txt +continuation pg31100.txt +continuation. pg31100.txt +continue pg31100.txt, pg3200.txt, pg100.txt +continue, pg31100.txt +continue. pg3200.txt, pg100.txt +continue." pg3200.txt +continue.' pg3200.txt +continued pg31100.txt, pg3200.txt +continued, pg31100.txt, pg3200.txt +continued-- pg31100.txt, pg3200.txt +continued--dwellings pg3200.txt +continued. pg31100.txt, pg3200.txt +continued: pg31100.txt, pg3200.txt +continued:-- pg31100.txt +continued:--'we pg3200.txt +continued; pg31100.txt, pg3200.txt, pg100.txt +continues pg3200.txt, pg100.txt +continues, pg3200.txt +continues. pg100.txt +continues: pg3200.txt +continues; pg3200.txt +continues] pg3200.txt, pg100.txt +continuing pg31100.txt, pg3200.txt +continuous pg3200.txt +continuously pg3200.txt +contorted pg3200.txt +contraband. pg3200.txt +contract pg3200.txt, pg100.txt +contract! pg100.txt +contract, pg3200.txt, pg100.txt +contract. pg3200.txt, pg100.txt +contract." pg3200.txt +contract.' pg3200.txt +contract; pg3200.txt +contracted pg3200.txt, pg100.txt +contracted, pg100.txt +contractors pg3200.txt +contractors, pg3200.txt +contractors. pg3200.txt +contracts pg3200.txt +contracts, pg3200.txt +contracts. pg3200.txt +contradict pg31100.txt, pg3200.txt, pg100.txt +contradict, pg31100.txt +contradicted." pg31100.txt +contradicting pg3200.txt +contradiction pg3200.txt +contradiction. pg31100.txt, pg100.txt +contradictions pg3200.txt +contradictory pg31100.txt +contradictory, pg31100.txt, pg3200.txt +contradistinction pg3200.txt +contraries pg100.txt +contraries? pg100.txt +contrariety. pg100.txt +contrariously; pg100.txt +contrariwise. pg31100.txt +contrary pg31100.txt, pg3200.txt, pg100.txt +contrary, pg31100.txt, pg3200.txt, pg100.txt +contrary. pg31100.txt, pg100.txt +contrary." pg3200.txt +contrary; pg100.txt +contrary? pg100.txt +contrast pg31100.txt, pg3200.txt +contrast, pg31100.txt, pg3200.txt +contrast. pg3200.txt +contrast: pg3200.txt +contrasted pg31100.txt, pg3200.txt +contrasted"; pg3200.txt +contrasted. pg3200.txt +contrasts pg3200.txt +contrasts. pg3200.txt +contribute pg31100.txt +contribute, pg31100.txt +contribute. 
pg3200.txt +contributed pg31100.txt, pg3200.txt +contributes pg3200.txt +contributing pg31100.txt +contribution pg3200.txt +contribution. pg3200.txt, pg100.txt +contributions pg3200.txt +contributions--a pg3200.txt +contributions; pg3200.txt +contributor pg3200.txt +contributors pg100.txt +contrition pg3200.txt +contrition, pg31100.txt +contriv'd pg100.txt +contriv'd, pg100.txt +contrivance pg3200.txt +contrivance! pg31100.txt +contrivance, pg31100.txt +contrivances, pg3200.txt +contrive pg31100.txt, pg3200.txt, pg100.txt +contrived pg31100.txt, pg3200.txt +contrived. pg3200.txt +contrives pg100.txt +contrives, pg100.txt +contriving pg31100.txt, pg3200.txt +contriving, pg100.txt +control pg3200.txt +control" pg3200.txt +control't. pg100.txt +control, pg3200.txt, pg100.txt +control- pg100.txt +control. pg3200.txt, pg100.txt +controlled pg3200.txt +controlled." pg3200.txt +controller, pg100.txt +controlling, pg100.txt +controls. pg100.txt +controversies pg3200.txt +controversy pg100.txt +controversy, pg100.txt +controversy. pg100.txt +contumely, pg100.txt +conundrum pg31100.txt, pg3200.txt +conundrums pg3200.txt +conundrums, pg3200.txt +convalescence. pg3200.txt +convalescents, pg3200.txt +convenaiat pg3200.txt +convenience pg31100.txt +convenience, pg31100.txt +convenience. pg31100.txt, pg3200.txt, pg100.txt +convenience.--fancying pg31100.txt +convenience; pg31100.txt +convenienced pg3200.txt +conveniences pg3200.txt, pg100.txt +conveniences." pg3200.txt +conveniences?" pg31100.txt +conveniency pg100.txt +convenient pg31100.txt, pg3200.txt, pg100.txt +convenient!" pg3200.txt +convenient--ten pg3200.txt +convenient. pg100.txt +convenient; pg3200.txt +conveniently pg31100.txt, pg3200.txt +conveniently-blind pg3200.txt +conveniently. pg3200.txt +convent pg3200.txt +convent. pg31100.txt, pg3200.txt +convented pg100.txt +conventicles- pg100.txt +convention pg3200.txt +convention." pg3200.txt +conventional. pg3200.txt +conventionalities--"business" pg3200.txt +conventions. pg3200.txt +convents pg3200.txt +convents, pg100.txt +convers'd pg100.txt +conversant pg31100.txt +conversation pg31100.txt, pg3200.txt, pg100.txt +conversation, pg31100.txt, pg3200.txt +conversation--(it pg31100.txt +conversation. pg31100.txt, pg3200.txt, pg100.txt +conversation." pg31100.txt, pg3200.txt +conversation: pg3200.txt +conversation; pg31100.txt, pg3200.txt +conversation? pg3200.txt +conversational pg3200.txt +conversationalist pg3200.txt +conversations pg31100.txt, pg3200.txt, pg100.txt +conversations, pg31100.txt +converse pg31100.txt, pg3200.txt, pg100.txt +converse, pg31100.txt +converse. pg3200.txt, pg100.txt +conversed pg31100.txt, pg3200.txt +converses pg100.txt +conversing pg31100.txt, pg3200.txt +conversing, pg3200.txt +conversing. pg3200.txt +conversion pg3200.txt, pg100.txt +conversion-- pg3200.txt +conversions pg3200.txt +convert pg31100.txt, pg100.txt +convert, pg3200.txt +convert. pg3200.txt +convert: pg100.txt +convert; pg3200.txt +converted pg3200.txt, pg100.txt +converted. pg3200.txt +convertest, pg100.txt +convertible pg3200.txt +converting pg31100.txt +convertite, pg100.txt +convertites pg100.txt +converts pg3200.txt +converts, pg3200.txt +converts. pg3200.txt +convey pg31100.txt, pg3200.txt, pg100.txt +convey'd pg100.txt +convey'd, pg100.txt +convey, pg31100.txt, pg100.txt +convey. pg3200.txt +conveyance. pg3200.txt, pg100.txt +conveyance." pg3200.txt +conveyed pg31100.txt, pg3200.txt +conveyed, pg3200.txt +conveyed. pg31100.txt +conveyed? pg31100.txt +conveyed?" 
pg31100.txt +conveying pg31100.txt, pg3200.txt +conveyor, pg3200.txt +conveys pg3200.txt +conveys. pg3200.txt +convict pg3200.txt +convict. pg3200.txt +convict; pg3200.txt +convicted pg3200.txt +convicted. pg3200.txt +conviction pg31100.txt, pg3200.txt +conviction. pg31100.txt, pg3200.txt +conviction." pg31100.txt +conviction: pg3200.txt +conviction; pg31100.txt +conviction?" pg3200.txt +convictions pg31100.txt, pg3200.txt +convictions? pg3200.txt +convicts pg3200.txt +convicts, pg3200.txt +convicts--discipline pg3200.txt +convince pg31100.txt, pg3200.txt, pg100.txt +convince, pg100.txt +convince? pg3200.txt +convinced pg31100.txt, pg3200.txt +convinced, pg31100.txt, pg3200.txt +convinced. pg31100.txt, pg3200.txt +convinced." pg31100.txt +convinced; pg3200.txt +convinces pg3200.txt, pg100.txt +convincing pg31100.txt, pg3200.txt +convincing. pg3200.txt +convincing; pg31100.txt +convocation pg100.txt +convoy. pg100.txt +convulse pg3200.txt +convulsing pg3200.txt +convulsions pg3200.txt +convulsions, pg3200.txt +convulsions. pg3200.txt +convulsively, pg3200.txt +cony-catch'd pg100.txt +cony-catch; pg100.txt +cony-catching! pg100.txt +cooing. pg3200.txt +cook pg31100.txt, pg3200.txt +cook'd. pg100.txt +cook's pg3200.txt +cook, pg3200.txt, pg100.txt +cook- pg100.txt +cook-maid pg3200.txt +cook. pg31100.txt, pg3200.txt, pg100.txt +cook; pg100.txt +cook? pg3200.txt, pg100.txt +cooked pg3200.txt +cooked! pg3200.txt +cooked. pg3200.txt +cookery pg3200.txt, pg100.txt +cooking. pg3200.txt +cooking; pg3200.txt +cooks pg31100.txt, pg100.txt +cooks. pg3200.txt, pg100.txt +cool pg31100.txt, pg3200.txt, pg100.txt +cool'd pg100.txt +cool'd, pg100.txt +cool'd. pg100.txt +cool, pg31100.txt, pg3200.txt, pg100.txt +cool. pg3200.txt, pg100.txt +cooled pg3200.txt +cooled, pg31100.txt +cooled. pg3200.txt +coolgardie pg3200.txt +coolgardie; pg3200.txt +coolies. pg3200.txt +cooling pg3200.txt, pg100.txt +coolly pg31100.txt +coolly, pg31100.txt +coolly. pg31100.txt +coolly: pg3200.txt +coolly; pg31100.txt +coolness pg3200.txt +coolness, pg3200.txt +coolness. pg3200.txt +cools, pg100.txt +cools. pg100.txt +coom pg3200.txt +coomooroo pg3200.txt +coon. pg3200.txt +coonamble pg3200.txt +coontz!' pg3200.txt +coonville pg3200.txt +coop pg3200.txt +cooper pg31100.txt, pg3200.txt +cooper"--handshake. pg3200.txt +cooper's pg3200.txt +cooper, pg3200.txt +cooper. pg3200.txt +cooper; pg3200.txt +cooper?" pg3200.txt +cooperate pg31100.txt +coopers, pg3200.txt +cootamundra pg3200.txt +cope pg31100.txt, pg3200.txt +cope. pg100.txt +copied pg31100.txt, pg3200.txt, pg100.txt +copied. pg3200.txt, pg100.txt +copied." pg31100.txt +copied?" pg3200.txt +copies pg31100.txt, pg3200.txt, pg100.txt +copies, pg3200.txt +copies. pg3200.txt, pg100.txt +copies." pg3200.txt +copies.] pg3200.txt +copious pg3200.txt +copley pg3200.txt +copper pg3200.txt, pg100.txt +copper! pg100.txt +copper, pg3200.txt +copper- pg3200.txt +copper. pg100.txt +coppers pg3200.txt +coppersmith. pg3200.txt +coppice; pg100.txt +copse." pg31100.txt +copts pg3200.txt +copulated pg3200.txt +copy pg31100.txt, pg3200.txt, pg100.txt +copy) pg3200.txt +copy, pg3200.txt +copy--and pg3200.txt +copy-book. pg100.txt +copy. pg3200.txt, pg100.txt +copying pg3200.txt, pg100.txt +copyist pg3200.txt +copyright pg3200.txt, pg100.txt +copyright. pg3200.txt +copyrighted, pg3200.txt +copyrighting pg3200.txt +copyrights, pg3200.txt +coquelin pg3200.txt +coquelin; pg3200.txt +coquetry, pg31100.txt +coquette pg31100.txt +coquette; pg3200.txt +cor. pg100.txt +coragio! 
pg100.txt +coragio, pg100.txt +coral pg3200.txt +corals, pg31100.txt +coram. pg100.txt +coranto. pg100.txt +corantos, pg100.txt +cord pg3200.txt +cord, pg3200.txt +cord-wood pg3200.txt +cord. pg100.txt +cord; pg100.txt +cordage pg3200.txt +cordelia pg100.txt +cordelia! pg100.txt +cordelia, pg100.txt +cordelia- pg100.txt +cordelia. pg100.txt +cordelia? pg100.txt +cordial pg31100.txt, pg3200.txt +cordial, pg3200.txt +cordial. pg3200.txt, pg100.txt +cordial: pg3200.txt +cordiality pg31100.txt +cordiality, pg31100.txt +cordiality. pg31100.txt +cordiality." pg31100.txt +cordially pg3200.txt +cordially, pg3200.txt +cordially. pg31100.txt, pg3200.txt +cordials pg3200.txt +cordon, pg3200.txt +cordova, pg3200.txt +cords pg3200.txt, pg100.txt +cords, pg100.txt +cords. pg100.txt +core pg3200.txt +core, pg3200.txt +core. pg3200.txt +core? pg100.txt +coriantumr. pg3200.txt +coriantumr; pg3200.txt +corihor," pg3200.txt +corin pg100.txt +corinth pg100.txt +corinth! pg100.txt +corinthian pg3200.txt +corinthian, pg100.txt +coriolanus pg100.txt +coriolanus! pg100.txt +coriolanus, pg100.txt +coriolanus. pg100.txt +coriolanus] pg100.txt +corioli pg100.txt +corioli, pg100.txt +corioli. pg100.txt +corioli; pg100.txt +corioli? pg100.txt +cork pg3200.txt, pg100.txt +cork! pg3200.txt +cork. pg3200.txt +corked pg3200.txt +corked! pg3200.txt +corker." pg3200.txt +corker.' pg3200.txt +corkindale pg3200.txt +corks--says: pg3200.txt +corkscrew, pg3200.txt +corkscrew. pg3200.txt +corkscrewed, pg3200.txt +corkscrewing pg3200.txt +cormorant, pg100.txt +corn pg3200.txt, pg100.txt +corn! pg100.txt +corn, pg3200.txt, pg100.txt +corn-bread." pg3200.txt +corn-crib; pg3200.txt +corn-flower; pg3200.txt +corn-pone pg3200.txt +corn-pone. pg3200.txt +corn-pone." pg3200.txt +corn-stalks pg3200.txt +corn. pg100.txt +corn; pg3200.txt, pg100.txt +cornelia pg3200.txt +cornelius pg100.txt +cornelius,] pg100.txt +cornelius. pg100.txt +cornelius? pg100.txt +corner pg31100.txt, pg3200.txt, pg100.txt +corner!" pg3200.txt +corner, pg31100.txt, pg3200.txt +corner--on pg3200.txt +corner--one pg3200.txt +corner-stone. pg3200.txt +corner. pg31100.txt, pg3200.txt, pg100.txt +corner." pg31100.txt +corner; pg3200.txt +corner? pg100.txt +cornered pg3200.txt +cornered, pg3200.txt +corners pg3200.txt +corners, pg3200.txt +corners. pg3200.txt, pg100.txt +cornerstone? pg100.txt +cornetcy pg3200.txt +cornets pg100.txt +cornfields pg3200.txt +corns pg3200.txt +cornuto pg100.txt +cornwall, pg100.txt +cornwall. pg3200.txt, pg100.txt +cornwall; pg100.txt +cornwall? pg100.txt +corollary, pg100.txt +coronation pg3200.txt, pg100.txt +coronation). pg3200.txt +coronation, pg100.txt +coronation--check. pg3200.txt +coronation-day, pg100.txt +coronation. pg3200.txt, pg100.txt +coronation; pg100.txt +coronation? pg100.txt +coronation?" pg3200.txt +coroner pg3200.txt +coroner's pg3200.txt +coroner. pg3200.txt +coroner] pg3200.txt +coronet pg3200.txt, pg100.txt +coronet, pg3200.txt +coroneting pg3200.txt +coronets pg3200.txt +coronets, pg100.txt +corporal pg3200.txt, pg100.txt +corporal, pg100.txt +corporal. pg3200.txt +corporation pg3200.txt +corporators pg3200.txt +corps pg3200.txt +corps, pg3200.txt +corps-bandits pg3200.txt +corps. pg3200.txt +corps." pg3200.txt +corps?" pg3200.txt +corpse pg3200.txt +corpse!--come, pg3200.txt +corpse, pg3200.txt, pg100.txt +corpse--that pg3200.txt +corpse. pg3200.txt +corpse." pg3200.txt +corpse; pg3200.txt +corpse? pg3200.txt +corpses pg3200.txt +corpses. pg3200.txt +corpses; pg3200.txt +corpulence pg3200.txt +corpus'. 
pg3200.txt +corpuscles. pg3200.txt +corral pg3200.txt +corral, pg3200.txt +corraled pg3200.txt +correct pg31100.txt, pg3200.txt, pg100.txt +correct! pg3200.txt +correct, pg31100.txt, pg3200.txt, pg100.txt +correct--it pg3200.txt +correct. pg31100.txt, pg3200.txt +correct." pg3200.txt +correct? pg3200.txt +correct?" pg3200.txt +corrected pg3200.txt +corrected. pg3200.txt +correcting pg3200.txt +correcting. pg3200.txt +correcting: pg3200.txt +correction pg3200.txt, pg100.txt +correction, pg100.txt +correction--with pg3200.txt +correction. pg3200.txt, pg100.txt +corrections pg3200.txt +corrective pg31100.txt +correctly pg3200.txt +correctly): pg3200.txt +correctly, pg3200.txt +correctly. pg3200.txt +correctly." pg3200.txt +correctly: pg3200.txt +correctness, pg31100.txt +correlation pg3200.txt +correspond pg3200.txt +correspond!' pg3200.txt +corresponded pg31100.txt +correspondence pg31100.txt, pg3200.txt +correspondence!" pg3200.txt +correspondence, pg31100.txt, pg3200.txt +correspondence. pg3200.txt +correspondence." pg3200.txt +correspondence; pg31100.txt +correspondent pg31100.txt, pg3200.txt +correspondent, pg31100.txt, pg3200.txt +correspondent. pg3200.txt +correspondents pg3200.txt +correspondents' pg3200.txt +correspondents, pg3200.txt +correspondents. pg3200.txt +correspondents." pg31100.txt +corresponding pg3200.txt, pg100.txt +corresponding. pg31100.txt +correspondingly pg3200.txt +correspondingly. pg3200.txt +corresponds pg3200.txt +corridor, pg3200.txt +corridors pg3200.txt +corridors, pg3200.txt +corroborate. pg100.txt +corroborated pg31100.txt +corroborated)." pg3200.txt +corroborating pg31100.txt +corroboration. pg31100.txt +corrosive, pg100.txt +corrugated pg3200.txt +corrugations pg3200.txt +corrupt pg3200.txt, pg100.txt +corrupt, pg3200.txt, pg100.txt +corrupt. pg100.txt +corrupted pg3200.txt, pg100.txt +corrupted. pg100.txt +corrupting pg100.txt +corruption pg3200.txt, pg100.txt +corruption, pg3200.txt, pg100.txt +corruption. pg3200.txt +corruption.] pg3200.txt +corruption?" pg3200.txt +corruptionist! pg3200.txt +corruptionville, pg3200.txt +corruptionville. pg3200.txt +corruptor." pg3200.txt +corse pg100.txt +corse- pg100.txt +corse. pg100.txt +corse; pg100.txt +corse? pg100.txt +corslet pg100.txt +cosmopolitan pg3200.txt +cosmopolitans, pg3200.txt +coss." pg3200.txt +cossack pg3200.txt +cost pg31100.txt, pg3200.txt, pg100.txt +cost, pg31100.txt, pg3200.txt, pg100.txt +cost. pg3200.txt, pg100.txt +cost." pg3200.txt +cost; pg3200.txt, pg100.txt +cost? pg100.txt +costard pg100.txt +costard,' pg100.txt +costard. pg100.txt +costard] pg100.txt +costeth pg3200.txt +costing pg3200.txt +costliness pg31100.txt +costly pg3200.txt +costly, pg3200.txt +costly. pg3200.txt +costly; pg3200.txt +costs pg3200.txt +costs, pg100.txt +costs. pg3200.txt +costs." pg3200.txt +costume pg3200.txt +costume, pg3200.txt +costume. pg3200.txt +costume.)..... pg3200.txt +costume.--timbs' pg3200.txt +costumed pg3200.txt +costumery pg3200.txt +costumery. pg3200.txt +costumes pg3200.txt +costumes, pg3200.txt +cosy pg3200.txt +cot pg3200.txt +cot-bed pg3200.txt +cot. pg31100.txt +cote pg3200.txt +cote-house, pg3200.txt +coted pg100.txt +cotsall. pg100.txt +cottage pg31100.txt, pg3200.txt, pg100.txt +cottage!" pg31100.txt +cottage, pg31100.txt, pg3200.txt, pg100.txt +cottage. pg31100.txt, pg3200.txt, pg100.txt +cottage." pg31100.txt +cottage: pg31100.txt +cottage; pg31100.txt +cottager's pg31100.txt +cottages pg3200.txt +cottages. 
pg31100.txt +cottages; pg31100.txt +cotton pg31100.txt, pg3200.txt +cotton, pg3200.txt +cotton-bale, pg3200.txt +cotton-factories pg3200.txt +cotton-gin. pg3200.txt +cotton-seed pg3200.txt +cotton-wood pg3200.txt +cotton. pg3200.txt +cottonwood. pg3200.txt +cottonwood." pg3200.txt +cottonwoods pg3200.txt +couch pg3200.txt, pg100.txt +couch'd, pg100.txt +couch'd. pg100.txt +couch, pg31100.txt, pg3200.txt, pg100.txt +coude? pg100.txt +cough pg31100.txt, pg3200.txt +cough, pg3200.txt +cough; pg100.txt +coughing pg3200.txt +coughlin. pg3200.txt +could! pg100.txt +could), pg3200.txt +could, pg31100.txt, pg3200.txt, pg100.txt +could- pg100.txt +could--"and pg31100.txt +could--"upon pg31100.txt +could--he pg3200.txt +could--that pg31100.txt +could. pg31100.txt, pg3200.txt, pg100.txt +could." pg31100.txt, pg3200.txt +could.... pg31100.txt +could: pg3200.txt +could; pg31100.txt, pg3200.txt +could;" pg31100.txt +could? pg3200.txt +could?" pg3200.txt +couldn't pg3200.txt +couldn't! pg3200.txt +couldn't!" pg3200.txt +couldn't, pg3200.txt +couldn't. pg3200.txt +couldn't." pg3200.txt +couldn't? pg3200.txt +couldst pg100.txt +couldst! pg100.txt +council pg3200.txt, pg100.txt +council, pg3200.txt, pg100.txt +council-chamber pg100.txt +council-fire, pg3200.txt +council-house, pg100.txt +council-lodge pg3200.txt +council-lodge, pg3200.txt +council-table. pg3200.txt +council. pg31100.txt, pg3200.txt, pg100.txt +council: pg3200.txt, pg100.txt +council? pg100.txt +council?" pg3200.txt +council] pg100.txt +councillor, pg100.txt +councillors. pg3200.txt +councils pg3200.txt +councils, pg3200.txt, pg100.txt +counsel pg31100.txt, pg3200.txt, pg100.txt +counsel! pg100.txt +counsel'd. pg100.txt +counsel, pg31100.txt, pg3200.txt, pg100.txt +counsel-keeper. pg100.txt +counsel. pg3200.txt, pg100.txt +counsel." pg3200.txt +counsel: pg100.txt +counsel; pg100.txt +counsel? pg100.txt +counseling pg3200.txt +counsell'd; pg100.txt +counsellor pg100.txt +counsellor! pg100.txt +counsellor, pg100.txt +counsellor. pg100.txt +counsellors pg100.txt +counsellors, pg100.txt +counselor's pg3200.txt +counselor-at-law pg3200.txt +counselor? pg100.txt +counsels pg3200.txt, pg100.txt +counsels, pg100.txt +counsels. pg100.txt +counsels; pg100.txt +count pg31100.txt, pg3200.txt, pg100.txt +count's pg3200.txt, pg100.txt +count, pg3200.txt, pg100.txt +count--anybody pg3200.txt +count--in pg3200.txt +count-cardinal pg100.txt +count. pg3200.txt, pg100.txt +count; pg100.txt +count? pg100.txt +counted pg31100.txt, pg3200.txt, pg100.txt +counted. pg3200.txt +countenanc'd. pg100.txt +countenance pg31100.txt, pg3200.txt, pg100.txt +countenance! pg100.txt +countenance!" pg3200.txt +countenance!--and pg31100.txt +countenance, pg31100.txt, pg3200.txt, pg100.txt +countenance. pg31100.txt, pg3200.txt, pg100.txt +countenance." pg3200.txt +countenance.' pg3200.txt +countenance; pg100.txt +countenance? pg3200.txt, pg100.txt +countenance?" pg3200.txt +countenances pg3200.txt +countenances. pg3200.txt, pg100.txt +counter pg3200.txt +counter, pg31100.txt, pg3200.txt +counter,--trying, pg31100.txt +counter-caster, pg100.txt +counter-seal'd. pg100.txt +counter-yells pg3200.txt +counter. pg100.txt +counter." pg3200.txt +counteract pg31100.txt +counteract, pg31100.txt +counteracting pg31100.txt +counterbalance. pg31100.txt +counterchange pg100.txt +counterfeit pg3200.txt, pg100.txt +counterfeit, pg100.txt +counterfeit-money pg3200.txt +counterfeit. pg100.txt +counterfeit: pg100.txt +counterfeit; pg100.txt +counterfeit? 
pg100.txt +counterfeited pg3200.txt +counterfeited, pg3200.txt +counterfeiting pg3200.txt, pg100.txt +counterfeiting. pg100.txt +counterfeitly. pg100.txt +counterfeits, pg3200.txt +counterfeits. pg3200.txt, pg100.txt +countermand pg3200.txt, pg100.txt +countermands pg100.txt +countermines; pg100.txt +counterpart pg31100.txt, pg3200.txt +counterpart, pg31100.txt +counterpoints, pg100.txt +counterpois'd. pg100.txt +counterpoise, pg100.txt +countersign pg3200.txt +countersign." pg3200.txt +countess pg3200.txt, pg100.txt +countesse pg3200.txt +countesses. pg100.txt +counties, pg100.txt +counties--and pg3200.txt +counting pg31100.txt, pg3200.txt +counting, pg3200.txt +counting-room. pg3200.txt +countless pg31100.txt, pg3200.txt +countries pg31100.txt, pg3200.txt, pg100.txt +countries, pg31100.txt, pg3200.txt +countries--the pg3200.txt +countries. pg3200.txt, pg100.txt +countries." pg3200.txt +countries; pg3200.txt +country pg31100.txt, pg3200.txt, pg100.txt +country! pg100.txt +country's pg3200.txt, pg100.txt +country's, pg100.txt +country, pg31100.txt, pg3200.txt, pg100.txt +country," pg3200.txt +country- pg100.txt +country--astonishing pg3200.txt +country--for pg3200.txt +country--immense pg3200.txt +country--the pg3200.txt +country--treasure, pg3200.txt +country-bred pg3200.txt +country-folk, pg3200.txt +country-lass pg3200.txt +country-maid. pg3200.txt +country-place pg3200.txt +country-seats, pg3200.txt +country-side. pg3200.txt +country-towns. pg3200.txt +country-woman. pg100.txt +country-woven pg3200.txt +country. pg31100.txt, pg3200.txt, pg100.txt +country." pg31100.txt, pg3200.txt +country.' pg3200.txt +country: pg3200.txt +country; pg31100.txt, pg3200.txt, pg100.txt +country? pg31100.txt, pg3200.txt, pg100.txt +country?" pg3200.txt +countryman pg100.txt +countryman, pg100.txt +countryman- pg100.txt +countryman. pg100.txt +countryman? pg100.txt +countrymen pg3200.txt, pg100.txt +countrymen! pg100.txt +countrymen, pg100.txt +countrymen- pg100.txt +countrymen. pg3200.txt, pg100.txt +countrymen.' pg100.txt +countrymen: pg100.txt +countrymen; pg100.txt +countrymen? pg100.txt +countryside. pg3200.txt +countryward, pg3200.txt +counts pg3200.txt, pg100.txt +counts, pg3200.txt +county pg31100.txt, pg3200.txt, pg100.txt +county' pg3200.txt +county, pg31100.txt, pg3200.txt, pg100.txt +county," pg3200.txt +county-seat pg3200.txt +county-seat. pg3200.txt +county. pg3200.txt, pg100.txt +county." pg31100.txt +county.' pg3200.txt +county; pg3200.txt +coupe. pg3200.txt +couple pg31100.txt, pg3200.txt, pg100.txt +couple, pg31100.txt, pg3200.txt +couple. pg31100.txt, pg3200.txt +couple." pg31100.txt +coupled pg3200.txt +couplement! pg100.txt +couples pg31100.txt, pg100.txt +couples), pg31100.txt +couples, pg3200.txt +couples. pg3200.txt +couples." pg31100.txt +courage pg31100.txt, pg3200.txt, pg100.txt +courage! pg100.txt +courage, pg31100.txt, pg3200.txt, pg100.txt +courage. pg3200.txt, pg100.txt +courage." pg3200.txt +courage; pg100.txt +courage? pg3200.txt +courageous pg3200.txt, pg100.txt +courageously pg31100.txt, pg3200.txt +courant pg3200.txt +courcy pg31100.txt +courcy!" pg31100.txt +courcy, pg31100.txt +courcy. pg31100.txt +courier pg3200.txt +courier, pg3200.txt +courier--he pg3200.txt +courier. pg3200.txt +courier; pg3200.txt +courier?" pg3200.txt +couriers pg3200.txt +course pg31100.txt, pg3200.txt, pg100.txt +course! pg3200.txt +course!' 
pg3200.txt +course) pg3200.txt +course, pg31100.txt, pg3200.txt, pg100.txt +course," pg31100.txt +course-- pg3200.txt +course--" pg3200.txt +course--and pg31100.txt +course--as pg3200.txt +course--consist pg3200.txt +course--fifty-six pg3200.txt +course--for pg3200.txt +course--has pg3200.txt +course--it pg3200.txt +course--its pg3200.txt +course--or pg3200.txt +course--people pg3200.txt +course--that pg3200.txt +course--things pg3200.txt +course--two-thirds pg3200.txt +course--vicksburg. pg3200.txt +course--what pg3200.txt +course. pg31100.txt, pg3200.txt, pg100.txt +course." pg31100.txt, pg3200.txt +course."-- pg31100.txt +course: pg3200.txt +course; pg31100.txt, pg3200.txt, pg100.txt +course? pg3200.txt, pg100.txt +course?" pg31100.txt +coursed pg3200.txt +courser; pg100.txt +courses pg3200.txt, pg100.txt +courses, pg31100.txt +courses. pg31100.txt, pg3200.txt +courses; pg3200.txt +court pg31100.txt, pg3200.txt, pg100.txt +court! pg3200.txt, pg100.txt +court!" pg3200.txt +court's pg3200.txt +court) pg3200.txt +court). pg3200.txt +court, pg31100.txt, pg3200.txt, pg100.txt +court," pg3200.txt +court- pg100.txt +court--a pg3200.txt +court--order!" pg3200.txt +court--that pg3200.txt +court--yes, pg3200.txt +court-baron. pg31100.txt +court-dress pg3200.txt +court-hand. pg100.txt +court-house pg3200.txt +court-masque, pg3200.txt +court-odour pg100.txt +court-plaister pg31100.txt +court-plaister. pg31100.txt +court-plaister?--i pg31100.txt +court-room pg3200.txt +court-room, pg3200.txt +court. pg3200.txt, pg100.txt +court." pg31100.txt, pg3200.txt +court.' pg3200.txt, pg100.txt +court.) pg3200.txt +court: pg3200.txt +court; pg3200.txt, pg100.txt +court? pg100.txt +court?' pg3200.txt +courte- pg3200.txt +courted pg31100.txt, pg3200.txt, pg100.txt +courtemains, pg3200.txt +courteous pg31100.txt, pg3200.txt +courteous, pg100.txt +courteous. pg31100.txt, pg3200.txt +courteously pg100.txt +courtesans pg3200.txt +courtesied, pg3200.txt +courtesies pg31100.txt, pg3200.txt, pg100.txt +courtesies, pg100.txt +courtesies. pg100.txt +courtesy pg31100.txt, pg3200.txt, pg100.txt +courtesy, pg31100.txt, pg3200.txt, pg100.txt +courtesy- pg100.txt +courtesy. pg31100.txt, pg3200.txt, pg100.txt +courtesy." pg3200.txt +courtesy; pg3200.txt, pg100.txt +courtezan pg100.txt +courtezan! pg100.txt +courtezan's pg100.txt +courtezan. pg100.txt +courtezans, pg100.txt +courtier pg3200.txt, pg100.txt +courtier's pg100.txt +courtier, pg100.txt +courtier. pg100.txt +courtier? pg100.txt +courtiers pg3200.txt, pg100.txt +courtiers, pg100.txt +courtiers. pg3200.txt, pg100.txt +courting pg3200.txt +courtliest pg3200.txt +courtliness, pg3200.txt +courtly, pg3200.txt, pg100.txt +courtroom, pg3200.txt +courts pg3200.txt +courts, pg3200.txt +courts--which pg3200.txt +courts-martial pg3200.txt +courts. pg3200.txt +courtship pg31100.txt +courtships. pg3200.txt +cousin pg31100.txt, pg3200.txt, pg100.txt +cousin! pg100.txt +cousin!" pg31100.txt +cousin'- pg100.txt +cousin's pg31100.txt +cousin's. pg100.txt +cousin's." pg31100.txt +cousin, pg31100.txt, pg3200.txt, pg100.txt +cousin. pg31100.txt, pg3200.txt, pg100.txt +cousin." pg31100.txt +cousin; pg31100.txt, pg100.txt +cousin? pg31100.txt, pg100.txt +cousin?" pg31100.txt +cousinly pg3200.txt +cousins pg31100.txt, pg3200.txt +cousins!" pg31100.txt +cousins' pg31100.txt +cousins'; pg31100.txt +cousins, pg31100.txt, pg3200.txt, pg100.txt +cousins--as pg31100.txt +cousins. pg31100.txt, pg100.txt +cousins." pg31100.txt +couttet, pg3200.txt +cove--hobart! 
pg3200.txt +covenant pg3200.txt +covenant, pg3200.txt +covenants pg100.txt +covent pg31100.txt +coventry pg100.txt +coventry. pg100.txt +coventry; pg100.txt +cover pg31100.txt, pg3200.txt, pg100.txt +cover'd pg100.txt +cover'd. pg100.txt +cover, pg3200.txt, pg100.txt +cover. pg100.txt +cover; pg3200.txt +covered pg31100.txt, pg3200.txt, pg100.txt +covered, pg3200.txt, pg100.txt +covering pg31100.txt, pg3200.txt +coverlet, pg3200.txt +covers pg3200.txt +covers! pg100.txt +covers, pg3200.txt +covers. pg3200.txt +covert, pg31100.txt +covert." pg31100.txt +covertly pg3200.txt +covertly, pg3200.txt +coverture, pg100.txt +coverture. pg100.txt +coveted., pg3200.txt +covetous pg100.txt +covetous, pg100.txt +covetous. pg100.txt +covetousness pg3200.txt +covetousness. pg3200.txt, pg100.txt +covetousness; pg100.txt +cow pg3200.txt, pg100.txt +cow!' pg3200.txt +cow, pg3200.txt +cow-- pg3200.txt +cow-bell, pg3200.txt +cow-catcher pg3200.txt +cow-keepers. pg3200.txt +cow-stable! pg3200.txt +cow. pg3200.txt +cow; pg3200.txt +cow?" pg3200.txt +coward pg3200.txt, pg100.txt +coward! pg100.txt +coward's pg3200.txt +coward, pg3200.txt, pg100.txt +coward,- pg100.txt +coward. pg3200.txt, pg100.txt +coward; pg100.txt +coward? pg3200.txt, pg100.txt +cowardice pg100.txt +cowardice, pg100.txt +cowardice- pg100.txt +cowardice. pg3200.txt, pg100.txt +cowardice; pg100.txt +cowardly pg3200.txt, pg100.txt +cowardly, pg31100.txt, pg100.txt +cowards pg3200.txt, pg100.txt +cowards, pg3200.txt, pg100.txt +cowards; pg3200.txt +cowboy's pg3200.txt +cowboy, pg3200.txt +cowboy." pg3200.txt +cowden pg3200.txt +cowdery, pg3200.txt +cowed pg3200.txt +cowhide pg3200.txt +cowhide. pg3200.txt +cowhide." pg3200.txt +cowhiding pg3200.txt +cowl pg3200.txt +cowper pg31100.txt +cowper, pg31100.txt +cowper." pg31100.txt +cowper? pg31100.txt +cows pg3200.txt +cows, pg31100.txt, pg3200.txt +cows--yes, pg3200.txt +cows. pg31100.txt, pg3200.txt +cows." pg3200.txt +cows; pg3200.txt +cox pg31100.txt +cox-comb pg100.txt +cox. pg31100.txt +coxcomb pg3200.txt +coxcomb! pg100.txt +coxcomb. pg100.txt +coxcomb." pg31100.txt +coxcomb? pg100.txt +coxcombs pg100.txt +coxcombs." pg31100.txt +coy'd pg100.txt +coy, pg100.txt +coyote pg3200.txt +coyotes pg3200.txt +coyotes, pg3200.txt +coz, pg100.txt +coz. pg100.txt +coz; pg100.txt +coz? pg100.txt +cozen pg100.txt +cozen'd pg100.txt +cozen'd, pg100.txt +cozenage! pg100.txt +cozenage; pg100.txt +cozened. pg100.txt +cozener. pg100.txt +cozy pg3200.txt +cozy, pg3200.txt +crab pg100.txt +crab, pg100.txt +crab. pg100.txt +crabbed; pg100.txt +crabs-- pg3200.txt +crabs; pg3200.txt +crack pg3200.txt, pg100.txt +crack'd pg100.txt +crack'd! pg100.txt +crack'd, pg100.txt +crack, pg3200.txt +crack-hemp. pg100.txt +crack. pg3200.txt, pg100.txt +cracked pg3200.txt +cracker pg3200.txt +cracking pg100.txt +cracking, pg3200.txt +crackled pg3200.txt +crackling pg3200.txt +cracklings pg3200.txt +cracks pg3200.txt, pg100.txt +cracks, pg3200.txt, pg100.txt +craddock. pg3200.txt +cradle pg31100.txt, pg3200.txt, pg100.txt +cradle, pg3200.txt +cradle-- pg3200.txt +cradle-babe pg100.txt +cradle. pg100.txt +cradle." pg3200.txt +cradle; pg100.txt +cradle? pg3200.txt +cradlebaugh pg3200.txt +cradlebaugh's pg3200.txt +cradled pg3200.txt +cradles pg3200.txt +cradles. pg100.txt +craft pg3200.txt, pg100.txt +craft, pg3200.txt, pg100.txt +craft. pg3200.txt +craft." pg3200.txt +craft; pg3200.txt +craftily pg100.txt +crafts pg100.txt +crafts. 
pg3200.txt +crag pg3200.txt +crag, pg3200.txt +craggy pg3200.txt +crags pg3200.txt +crags, pg3200.txt +crags--a pg3200.txt +cram pg3200.txt, pg100.txt +cramm'd pg100.txt +crammed pg3200.txt +cramp pg31100.txt +cramp's pg3200.txt +cramp. pg100.txt +cramped pg3200.txt +cramped, pg3200.txt +cramping pg3200.txt +cramps, pg31100.txt, pg100.txt +cranberries, pg3200.txt +crane pg3200.txt +crane, pg3200.txt +crane. pg3200.txt +craned pg3200.txt +cranes pg3200.txt +crank. pg3200.txt +cranking pg100.txt +cranmer pg100.txt +cranmer's pg100.txt +cranmer, pg100.txt +cranmer. pg100.txt +cranmer; pg100.txt +cranmer? pg100.txt +cranny pg100.txt +crants, pg100.txt +crape! pg3200.txt +crare pg100.txt +crash pg3200.txt, pg100.txt +crash! pg3200.txt +crash, pg3200.txt +crash-word pg3200.txt +crash. pg3200.txt +crash; pg3200.txt +crashed pg3200.txt +crashes pg3200.txt +crashing pg3200.txt +crassus, pg100.txt +crassus. pg100.txt +crate pg3200.txt +crate, pg3200.txt +crater pg3200.txt +crater, pg3200.txt +crater. pg3200.txt +craters; pg3200.txt +cravat pg3200.txt +cravats pg3200.txt +cravats, pg3200.txt +crave pg100.txt +crave, pg100.txt +crave- pg100.txt +crave." pg3200.txt +craven. pg100.txt +craves pg3200.txt, pg100.txt +craving pg31100.txt, pg3200.txt +crawford pg31100.txt +crawford's pg31100.txt +crawford's," pg31100.txt +crawford's. pg31100.txt +crawford, pg31100.txt +crawford," pg31100.txt +crawford. pg31100.txt +crawford." pg31100.txt +crawford; pg31100.txt +crawford?" pg31100.txt +crawfords' pg31100.txt +crawl pg3200.txt +crawl. pg3200.txt +crawled pg3200.txt +crawler, pg3200.txt +crawlers, pg3200.txt +crawling pg3200.txt +crawling, pg3200.txt +crawling. pg3200.txt +crayon, pg31100.txt +crayons!--how pg31100.txt +crayons, pg3200.txt +craze, pg3200.txt +crazed pg100.txt +craziest. pg3200.txt +crazily pg3200.txt +crazy pg3200.txt +crazy, pg3200.txt +crazy. pg3200.txt +crazy." pg3200.txt +crazy; pg3200.txt +crazy?" pg3200.txt +creak pg3200.txt +creaked! pg3200.txt +creaking. pg3200.txt +creaks pg3200.txt +cream pg3200.txt +cream, pg100.txt +cream-cheese pg31100.txt +cream-colored pg3200.txt +cream. pg31100.txt, pg3200.txt, pg100.txt +crease pg3200.txt +create pg31100.txt, pg3200.txt, pg100.txt +create! pg100.txt +create. pg31100.txt, pg3200.txt +created pg31100.txt, pg3200.txt, pg100.txt +created, pg31100.txt, pg3200.txt, pg100.txt +created. pg31100.txt, pg3200.txt +created? pg3200.txt +created_." pg31100.txt +creates pg100.txt +creates! pg3200.txt +creating pg31100.txt, pg3200.txt +creating. pg31100.txt, pg100.txt +creation pg3200.txt, pg100.txt +creation, pg3200.txt, pg100.txt +creation. pg3200.txt, pg100.txt +creation." pg31100.txt +creations pg31100.txt, pg3200.txt +creations-- pg3200.txt +creations." pg31100.txt +creator pg3200.txt +creator, pg3200.txt +creator. pg3200.txt +creator." pg3200.txt +creators, pg3200.txt +creature pg31100.txt, pg3200.txt, pg100.txt +creature! pg3200.txt +creature's pg3200.txt +creature, pg31100.txt, pg3200.txt, pg100.txt +creature,--as pg31100.txt +creature- pg100.txt +creature. pg31100.txt, pg3200.txt, pg100.txt +creature." pg31100.txt, pg3200.txt +creature: pg31100.txt +creature; pg31100.txt, pg100.txt +creature? pg3200.txt, pg100.txt +creature?" pg31100.txt +creatures pg31100.txt, pg3200.txt, pg100.txt +creatures! pg3200.txt +creatures!" pg3200.txt +creatures, pg31100.txt, pg3200.txt, pg100.txt +creatures. pg3200.txt +creatures.' pg3200.txt +creatures; pg3200.txt, pg100.txt +creatures? pg3200.txt +credence pg100.txt +credence. 
pg100.txt +credent pg100.txt +credibility; pg3200.txt +credit pg31100.txt, pg3200.txt, pg100.txt +credit, pg31100.txt, pg3200.txt, pg100.txt +credit- pg100.txt +credit. pg3200.txt, pg100.txt +credit." pg31100.txt +credit; pg3200.txt, pg100.txt +credit? pg100.txt +creditable pg31100.txt, pg3200.txt +creditable, pg31100.txt +creditably pg3200.txt +creditably, pg3200.txt +creditably--at pg3200.txt +creditably." pg31100.txt +credited pg3200.txt +crediting. pg31100.txt +creditor pg3200.txt +creditor, pg100.txt +creditor. pg100.txt +creditors pg31100.txt, pg3200.txt, pg100.txt +creditors--well, pg3200.txt +creditors. pg3200.txt +credo. pg100.txt +credulity pg3200.txt, pg100.txt +credulous; pg31100.txt +creed pg3200.txt +creed, pg3200.txt +creed. pg3200.txt +creed; pg100.txt +creek pg3200.txt, pg100.txt +creek, pg3200.txt +creek," pg3200.txt +creek. pg3200.txt +creep pg3200.txt, pg100.txt +creep, pg3200.txt +creep. pg3200.txt, pg100.txt +creepers pg3200.txt +creeping pg3200.txt +creeps. pg100.txt +creepy pg3200.txt +creetur's pg3200.txt +creetur. pg3200.txt +cremation pg3200.txt +creoles--french. pg3200.txt +crepitations pg3200.txt +crept pg3200.txt, pg100.txt +crept, pg3200.txt, pg100.txt +crept. pg100.txt +crescent pg3200.txt +crescent, pg31100.txt, pg3200.txt +crescents pg3200.txt +cressid! pg100.txt +cressid, pg100.txt +cressid. pg100.txt +cressid.' pg100.txt +cressid? pg100.txt +cressida pg100.txt +cressida's? pg100.txt +cressida. pg100.txt +cressida? pg100.txt +cressids, pg100.txt +crest pg3200.txt, pg100.txt +crest, pg100.txt +crest. pg3200.txt, pg100.txt +crested pg3200.txt +crestfall'n, pg100.txt +crestfallen. pg3200.txt +crests, pg3200.txt +crests; pg100.txt +crete pg100.txt +crete, pg3200.txt, pg100.txt +cretur pg3200.txt +cretur's pg3200.txt +cretur, pg3200.txt +creturs pg3200.txt +crevice pg3200.txt +crevice, pg3200.txt +crevice. pg3200.txt +crevices pg3200.txt +crew pg3200.txt +crew, pg3200.txt, pg100.txt +crew. pg3200.txt, pg100.txt +crew." pg3200.txt +crew.' pg3200.txt +crew; pg3200.txt, pg100.txt +crew? pg100.txt +crewels, pg3200.txt +crews pg3200.txt +crews, pg100.txt +crib, pg3200.txt +crib-- pg3200.txt +crib. pg31100.txt +cribbage pg3200.txt +cribs, pg100.txt +cricket-grounds pg3200.txt +cried pg31100.txt, pg3200.txt, pg100.txt +cried, pg31100.txt, pg3200.txt, pg100.txt +cried-- pg3200.txt +cried--but pg3200.txt +cried. pg3200.txt +cried: pg3200.txt +cried; pg100.txt +cried? pg100.txt +cried?" pg3200.txt +crier pg100.txt +crier. pg100.txt +cries pg3200.txt, pg100.txt +cries" pg3200.txt +cries, pg100.txt +cries. pg3200.txt, pg100.txt +cries: pg3200.txt +cries; pg3200.txt +cries? pg100.txt +criest pg100.txt +crieth] pg100.txt +crime pg31100.txt, pg3200.txt, pg100.txt +crime!" pg3200.txt +crime!--and pg3200.txt +crime, pg3200.txt, pg100.txt +crime. pg3200.txt, pg100.txt +crime." pg3200.txt +crime? pg3200.txt +crime?" pg3200.txt +crimean pg3200.txt +crimeless. pg100.txt +crimes pg3200.txt, pg100.txt +crimes, pg3200.txt, pg100.txt +crimes. pg31100.txt, pg3200.txt +criminal pg3200.txt +criminal, pg3200.txt +criminal. pg3200.txt +criminal; pg3200.txt +criminals pg3200.txt +criminals, pg3200.txt +criminals. pg3200.txt +crimson pg3200.txt +crimson, pg3200.txt +crimson; pg3200.txt +cringing pg3200.txt +crinkle pg3200.txt +crinkling pg3200.txt +crinklings. pg3200.txt +crioni pg3200.txt +crioni, pg3200.txt +cripple pg3200.txt +cripple, pg3200.txt +crippled pg3200.txt +crippled. 
pg3200.txt +cripples pg3200.txt +cripples, pg3200.txt +cripples--the pg3200.txt +crippling pg3200.txt +crisis, pg3200.txt +crisis. pg3200.txt +crisp pg3200.txt +crisp, pg3200.txt +crispian. pg100.txt +crispian.' pg100.txt +crispianus. pg100.txt +criss-cross pg3200.txt +cristo." pg3200.txt +critic pg3200.txt +critic's pg3200.txt +critic, pg31100.txt, pg3200.txt +critic. pg3200.txt +critical pg31100.txt, pg3200.txt +critical, pg3200.txt, pg100.txt +critical. pg3200.txt, pg100.txt +critically pg3200.txt +criticise pg3200.txt +criticise. pg31100.txt +criticise." pg31100.txt, pg3200.txt +criticised pg3200.txt +criticised. pg3200.txt +criticism pg3200.txt +criticism, pg3200.txt +criticism--disarmed pg3200.txt +criticism. pg31100.txt, pg3200.txt +criticism; pg3200.txt +criticisms pg3200.txt +criticized pg3200.txt +critics pg31100.txt, pg3200.txt +critique pg31100.txt, pg3200.txt +critique" pg3200.txt +critique. pg3200.txt +critiques pg3200.txt +crivoscia, pg3200.txt +croak pg3200.txt, pg100.txt +croaking pg3200.txt +crockery pg3200.txt +crockery, pg3200.txt +crockery. pg3200.txt +crocodile pg3200.txt, pg100.txt +crocodile. pg3200.txt, pg100.txt +crocodile? pg100.txt +croft pg31100.txt +croft's pg31100.txt +croft, pg31100.txt +croft. pg31100.txt +crofts pg31100.txt +croix; pg3200.txt +croker pg3200.txt +cromwell pg100.txt +cromwell! pg100.txt +cromwell, pg31100.txt, pg3200.txt, pg100.txt +cromwell. pg3200.txt, pg100.txt +cromwell; pg100.txt +crone. pg100.txt +crook pg3200.txt +crooked pg3200.txt +crooked, pg31100.txt, pg3200.txt +crooked. pg31100.txt +crooked; pg3200.txt +crookedness pg3200.txt +crooks, pg3200.txt +crop pg3200.txt, pg100.txt +crop, pg3200.txt +crop. pg3200.txt, pg100.txt +cropp'd pg100.txt +cropp'd! pg100.txt +cropp'd. pg100.txt +cropped pg3200.txt +cropping pg3200.txt +croppings. pg3200.txt +crops pg3200.txt +crops. pg3200.txt +crops; pg3200.txt +crops? pg3200.txt +cross pg31100.txt, pg3200.txt, pg100.txt +cross!" pg3200.txt +cross'd! pg100.txt +cross'd, pg100.txt +cross'd. pg100.txt +cross'd; pg100.txt +cross, pg31100.txt, pg3200.txt, pg100.txt +cross--nor pg3200.txt +cross-bow pg3200.txt, pg100.txt +cross-exam--" pg3200.txt +cross-examined pg3200.txt +cross-eyed, pg3200.txt +cross-garter'd, pg100.txt +cross-garter'd? pg100.txt +cross-garterd.' pg100.txt +cross-hall pg3200.txt +cross-legged pg3200.txt +cross-questioned. pg3200.txt +cross. pg31100.txt, pg3200.txt, pg100.txt +cross." pg31100.txt +cross; pg31100.txt, pg100.txt +crossbones pg3200.txt +crosse pg3200.txt +crossed pg31100.txt, pg3200.txt +crossed. pg3200.txt +crossed: pg100.txt +crosses pg31100.txt, pg3200.txt +crossing pg31100.txt, pg3200.txt +crossing, pg3200.txt +crossing-sweepership. pg3200.txt +crossing?' pg3200.txt +crossings pg3200.txt +crossings--' pg3200.txt +crossings; pg3200.txt +crossness pg31100.txt +crossness. pg100.txt +crost, pg100.txt +crotchets pg100.txt +crotchets, pg3200.txt +crouch pg3200.txt, pg100.txt +croup pg3200.txt +croup, pg3200.txt +croup-kettle. pg3200.txt +croup." pg3200.txt +croup?" pg3200.txt +croupiers pg3200.txt +crow pg3200.txt, pg100.txt +crow'd, pg100.txt +crow, pg3200.txt, pg100.txt +crow. pg100.txt +crow; pg100.txt +crow? pg100.txt +crowbar pg3200.txt +crowbar. pg3200.txt +crowd pg31100.txt, pg3200.txt +crowd! pg31100.txt +crowd" pg3200.txt +crowd, pg31100.txt, pg3200.txt +crowd--but pg31100.txt +crowd. pg3200.txt +crowd: pg3200.txt +crowd; pg31100.txt +crowded pg3200.txt, pg100.txt +crowded, pg3200.txt +crowded. pg3200.txt +crowding pg3200.txt, pg100.txt +crowding. 
pg3200.txt +crowding?" pg3200.txt +crowds pg3200.txt +crowds, pg3200.txt +crowds. pg3200.txt +crowed. pg3200.txt +crowing. pg3200.txt +crowkeeper; pg100.txt +crown pg31100.txt, pg3200.txt, pg100.txt +crown! pg3200.txt, pg100.txt +crown!" pg31100.txt, pg3200.txt +crown'. pg3200.txt +crown'd pg100.txt +crown'd, pg100.txt +crown'd. pg100.txt +crown'd: pg100.txt +crown'd; pg100.txt +crown'd? pg100.txt +crown, pg31100.txt, pg3200.txt, pg100.txt +crown- pg100.txt +crown--has pg3200.txt +crown. pg3200.txt, pg100.txt +crown." pg31100.txt, pg3200.txt +crown.' pg100.txt +crown; pg31100.txt, pg100.txt +crown? pg100.txt +crown] pg100.txt +crowned pg3200.txt, pg100.txt +crowned, pg3200.txt, pg100.txt +crowned--dared pg3200.txt +crowned. pg3200.txt +crowning pg3200.txt +crowning, pg3200.txt +crownless pg3200.txt +crowns pg3200.txt, pg100.txt +crowns, pg100.txt +crowns. pg3200.txt, pg100.txt +crowns; pg100.txt +crowns? pg100.txt +crows pg3200.txt, pg100.txt +crows! pg100.txt +crows, pg3200.txt, pg100.txt +crows. pg100.txt +croz pg3200.txt +crucial pg3200.txt +crucified pg3200.txt +crucified. pg3200.txt +crucified; pg3200.txt +crucifix. pg3200.txt +crucifixion pg3200.txt +crucifixion. pg3200.txt +crucifixion? pg3200.txt +crude pg3200.txt +crudely pg3200.txt +cruel pg31100.txt, pg3200.txt, pg100.txt +cruel! pg3200.txt +cruel, pg31100.txt, pg3200.txt, pg100.txt +cruel: pg100.txt +cruel; pg100.txt +cruel? pg100.txt +cruelest pg3200.txt +crueller pg100.txt +cruelly pg31100.txt, pg100.txt +cruelly. pg3200.txt, pg100.txt +cruelly?" pg31100.txt +cruelties pg31100.txt, pg3200.txt +cruelty pg3200.txt, pg100.txt +cruelty! pg100.txt +cruelty, pg3200.txt, pg100.txt +cruelty. pg31100.txt, pg100.txt +cruelty." pg31100.txt +cruelty; pg100.txt +cruise pg3200.txt +cruise. pg3200.txt +crum, pg100.txt +crumble pg3200.txt +crumble. pg3200.txt +crumbled pg3200.txt +crumbled. pg3200.txt +crumbling pg3200.txt +crumbs pg3200.txt +crumbs. pg100.txt +crumpled pg3200.txt +crupper? pg100.txt +crusade pg3200.txt +crusade. pg3200.txt +crusader pg3200.txt +crusaders pg3200.txt +crusades. pg3200.txt +crusading pg3200.txt +crush pg3200.txt, pg100.txt +crush-hat pg3200.txt +crushed pg3200.txt +crushed. pg3200.txt +crusher. pg3200.txt +crushing pg3200.txt +crushings pg3200.txt +crust pg3200.txt, pg100.txt +crust, pg3200.txt +crustily: pg3200.txt +crusts pg3200.txt +crusts. pg100.txt +crutch pg100.txt +crutch! pg100.txt +crutch, pg100.txt +crutch. pg100.txt +crutches pg3200.txt +crutches!" pg3200.txt +crutches, pg3200.txt +crutches. pg3200.txt +crutches.' pg3200.txt +crutches; pg3200.txt +cry pg31100.txt, pg3200.txt, pg100.txt +cry! pg100.txt +cry, pg3200.txt, pg100.txt +cry-- pg3200.txt +cry--gave pg3200.txt +cry-babies. pg3200.txt +cry-baby, pg3200.txt +cry-out pg31100.txt +cry. pg31100.txt, pg3200.txt, pg100.txt +cry." pg3200.txt +cry: pg3200.txt +cry; pg31100.txt, pg3200.txt, pg100.txt +cry? pg3200.txt, pg100.txt +cry?' pg3200.txt +crying pg3200.txt, pg100.txt +crying, pg3200.txt +crying-- pg3200.txt +crying. pg3200.txt +crying: pg3200.txt +crying; pg3200.txt +crystal pg3200.txt +crystal! pg3200.txt +crystal, pg3200.txt +crystal-button, pg100.txt +crystal; pg3200.txt +crystallize pg3200.txt +crystals, pg3200.txt +cts pg3200.txt +cts. pg3200.txt +cub pg3200.txt +cub!" pg3200.txt +cub!' pg3200.txt +cub, pg3200.txt +cub-pilot pg3200.txt +cub. pg3200.txt +cub?' pg3200.txt +cuba." pg3200.txt +cubby. pg3200.txt +cube?" pg3200.txt +cubit pg100.txt +cubs. pg3200.txt +cuckold, pg100.txt +cuckold-mad; pg100.txt +cuckold-maker, pg100.txt +cuckold. 
pg100.txt +cuckold? pg100.txt +cuckolds. pg100.txt +cuckoo, pg100.txt +cuckoo-clock] pg3200.txt +cucumber. pg31100.txt +cudgel pg100.txt +cudgel, pg3200.txt +cudgel; pg100.txt +cudgeled; pg100.txt +cudgell'd, pg100.txt +cudgels; pg100.txt +cue-owls. pg3200.txt +cue. pg3200.txt, pg100.txt +cues pg3200.txt +cues, pg100.txt +cuff pg100.txt +cuff, pg3200.txt +cuffing pg3200.txt +cuffs, pg3200.txt +cull'd pg100.txt +culled pg3200.txt +cullion. pg100.txt +cullom pg3200.txt +culminated pg3200.txt +culminating pg3200.txt +culmination pg3200.txt +culpable. pg100.txt +culprit's pg3200.txt +cult pg3200.txt +cult. pg3200.txt +cultivate pg3200.txt +cultivated pg31100.txt, pg3200.txt +cultivated. pg3200.txt +cultivated.' pg3200.txt +cultivation pg31100.txt, pg3200.txt +cultivation. pg3200.txt +cults pg3200.txt +culture pg3200.txt +culture, pg3200.txt +culture-standard pg3200.txt +culture-wagon pg3200.txt +culture. pg3200.txt +culture; pg3200.txt +culverin, pg100.txt +culvert pg3200.txt +cumae, pg3200.txt +cumbering pg3200.txt +cumberland, pg31100.txt, pg100.txt +cumbersome pg3200.txt +cumbrous pg3200.txt +cumbrously pg3200.txt +cumming's pg3200.txt +cumming.] pg3200.txt +cumulation, pg3200.txt +cumulative pg3200.txt +cunning pg31100.txt, pg3200.txt, pg100.txt +cunning! pg100.txt +cunning, pg31100.txt, pg100.txt +cunning. pg100.txt +cunningly pg3200.txt, pg100.txt +cunnings- pg100.txt +cunt, pg3200.txt +cup pg31100.txt, pg3200.txt, pg100.txt +cup, pg3200.txt, pg100.txt +cup--i pg3200.txt +cup-bearer. pg3200.txt +cup-clothes, pg3200.txt +cup. pg3200.txt, pg100.txt +cup." pg3200.txt +cup; pg100.txt +cupbearer pg3200.txt +cupbearer; pg100.txt +cupel, pg3200.txt +cupid pg100.txt +cupid! pg100.txt +cupid, pg100.txt +cupid. pg100.txt +cupid; pg100.txt +cupids pg100.txt +cupids, pg100.txt +cups, pg3200.txt, pg100.txt +cups; pg100.txt +cur pg3200.txt, pg100.txt +cur! pg100.txt +cur'd pg100.txt +cur'd, pg100.txt +cur'd. pg100.txt +cur'us: pg3200.txt +cur, pg100.txt +cur--" pg3200.txt +cur? pg100.txt +curacy pg31100.txt +curan. pg100.txt +curate pg100.txt +curate, pg100.txt +curb pg3200.txt +curb, pg100.txt +curbs pg100.txt +curbstone pg3200.txt +curbstone. pg3200.txt +curds, pg100.txt +cure pg31100.txt, pg3200.txt, pg100.txt +cure! pg100.txt +cure, pg3200.txt, pg100.txt +cure-alls. pg3200.txt +cure. pg31100.txt, pg3200.txt, pg100.txt +cure." pg31100.txt, pg3200.txt +cure.--her pg31100.txt +cure: pg100.txt +cure; pg31100.txt +cure;--not pg31100.txt +cure? pg100.txt +cured pg31100.txt, pg3200.txt +cured. pg100.txt +curepipe pg3200.txt +curer pg100.txt +curers pg3200.txt +cures pg100.txt +cures, pg100.txt +curfew, pg100.txt +curiassities!" pg3200.txt +curing pg100.txt +curio? pg100.txt +curiosities pg31100.txt, pg3200.txt +curiosities, pg3200.txt +curiosities--sabbath pg3200.txt +curiosities. pg3200.txt +curiosity pg31100.txt, pg3200.txt +curiosity, pg31100.txt, pg3200.txt +curiosity--go pg3200.txt +curiosity-breeding pg3200.txt +curiosity. pg31100.txt, pg3200.txt +curiosity:-- pg31100.txt +curiosity; pg31100.txt, pg3200.txt +curious pg31100.txt, pg3200.txt +curious, pg31100.txt, pg3200.txt +curious. pg31100.txt, pg3200.txt +curious." pg3200.txt +curiously pg3200.txt +curiously, pg3200.txt +curiously. pg3200.txt +curl pg3200.txt +curl!" pg3200.txt +curled pg3200.txt +curled-up pg3200.txt +curling pg3200.txt +curls pg3200.txt +curls, pg100.txt +curls; pg100.txt +currency." pg3200.txt +currency?" pg3200.txt +current pg31100.txt, pg3200.txt +current, pg100.txt +current. pg3200.txt, pg100.txt +current.' 
pg3200.txt +current; pg3200.txt +current? pg3200.txt +curricle pg31100.txt +curricle?" pg31100.txt +currish pg100.txt +currish. pg100.txt +curry pg3200.txt +curry, pg3200.txt +curry-comb, pg3200.txt +curry." pg3200.txt +curs! pg100.txt +curs'd pg100.txt +curs'd? pg100.txt +curs, pg100.txt +curs. pg100.txt +curs; pg100.txt +curse pg3200.txt, pg100.txt +curse! pg100.txt +curse, pg100.txt +curse- pg100.txt +curse. pg100.txt +curse." pg31100.txt +curse: pg100.txt +curse; pg100.txt +curse? pg100.txt +cursed pg3200.txt, pg100.txt +cursed, pg3200.txt +cursed. pg3200.txt +curses pg3200.txt, pg100.txt +curses! pg100.txt +curses, pg3200.txt, pg100.txt +curses. pg100.txt +curses; pg100.txt +cursing pg3200.txt, pg100.txt +cursing, pg3200.txt +cursings. pg3200.txt +cursory pg3200.txt +curst! pg100.txt +curst, pg100.txt +curst. pg100.txt +curst.' pg100.txt +curst; pg100.txt +cursy. pg100.txt +curtail pg3200.txt +curtailed pg3200.txt +curtailed) pg3200.txt +curtailed? pg3200.txt +curtain pg31100.txt, pg3200.txt, pg100.txt +curtain, pg3200.txt +curtain. pg3200.txt, pg100.txt +curtain? pg100.txt +curtained pg3200.txt +curtains pg3200.txt +curtains, pg3200.txt +curtains. pg31100.txt, pg100.txt +curtains.] pg100.txt +curtesye. pg3200.txt +curtis pg100.txt +curtis! pg100.txt +curtis. pg100.txt +curtsey, pg31100.txt +curtsies. pg100.txt +curtsy pg100.txt +curtsy, pg100.txt +curtsy. pg100.txt +curve pg3200.txt +curve, pg3200.txt +curved pg3200.txt +curves pg3200.txt +curvet pg100.txt +curvets pg100.txt +curving pg3200.txt +cushion, pg100.txt +cushion--he pg3200.txt +cushion. pg100.txt +cushioned pg3200.txt +cushions pg3200.txt +cushions, pg3200.txt +cushions? pg100.txt +cusiomhouse, pg3200.txt +cuss pg3200.txt +cuss'd pg3200.txt +cuss; pg3200.txt +cussed pg3200.txt +cussed, pg3200.txt +cussing, pg3200.txt +cussing. pg3200.txt +custalorum. pg100.txt +custard. pg3200.txt +custards. pg3200.txt +custodian. pg3200.txt +custody pg31100.txt +custody. pg100.txt +custody? pg100.txt +custom pg31100.txt, pg3200.txt, pg100.txt +custom, pg3200.txt, pg100.txt +custom--" pg3200.txt +custom-house, pg3200.txt +custom-shrunk. pg100.txt +custom-would, pg3200.txt +custom. pg31100.txt, pg3200.txt +custom." pg31100.txt +custom: pg3200.txt +custom; pg3200.txt, pg100.txt +custom? pg3200.txt, pg100.txt +customarily pg3200.txt +customary pg3200.txt +customary." pg3200.txt +customer pg3200.txt +customer's pg3200.txt +customer, pg3200.txt +customer--his pg3200.txt +customer. pg100.txt +customer: pg3200.txt +customers pg3200.txt +customers' pg3200.txt +customers, pg3200.txt +customers." pg3200.txt +customers; pg3200.txt +customers? pg100.txt +customhouse pg3200.txt +customhouse. pg3200.txt +customs pg3200.txt +customs, pg3200.txt, pg100.txt +customs. pg3200.txt +customs." pg3200.txt +cut pg31100.txt, pg3200.txt, pg100.txt +cut, pg100.txt +cut- pg3200.txt +cut-and-dried, pg3200.txt +cut-off pg3200.txt +cut-offs pg3200.txt +cut. pg31100.txt, pg3200.txt, pg100.txt +cut." pg31100.txt +cut.' pg3200.txt, pg100.txt +cut; pg100.txt +cut? pg3200.txt, pg100.txt +cuteness, pg3200.txt +cuticle pg3200.txt +cutler: pg3200.txt +cutlet pg3200.txt +cutlets. pg31100.txt +cutoff pg3200.txt +cutoffs pg3200.txt +cutpurse. pg100.txt +cuts pg3200.txt +cutter pg100.txt +cutter, pg3200.txt +cutter. pg3200.txt +cutting pg31100.txt, pg3200.txt +cutting, pg3200.txt +cyclone pg3200.txt +cyclone. pg3200.txt +cyclopedia pg3200.txt +cydnus, pg100.txt +cydnus. pg100.txt +cylinder pg3200.txt +cylinder-head." 
pg3200.txt +cymbals pg3200.txt +cymbeline pg100.txt +cymbeline, pg100.txt +cynicisms pg3200.txt +cyprus pg100.txt +cyprus. pg100.txt +cyprus; pg100.txt +cytherea, pg100.txt +czar pg3200.txt +czar's pg3200.txt +czar. pg3200.txt +czar." pg3200.txt +czar.' pg3200.txt +czarship; pg3200.txt +d', pg3200.txt +d'affaires pg3200.txt +d'alencon pg3200.txt +d'alencon, pg3200.txt +d'alencon. pg3200.txt +d'alencon?" pg3200.txt +d'angleterre. pg100.txt +d'apres pg3200.txt +d'arc pg3200.txt +d'arc. pg3200.txt +d'arc: pg3200.txt +d'arc?" pg3200.txt +d'armee." pg3200.txt +d'armee.'" pg3200.txt +d'arthur.--m.t.] pg3200.txt +d'arve pg3200.txt +d'arve. pg3200.txt +d'aulon pg3200.txt +d'aulon, pg3200.txt +d'elbow. pg100.txt +d'etat." pg3200.txt +d'europe!" pg3200.txt +d'ho^te pg3200.txt +d'ho^te. pg3200.txt +d'hote pg3200.txt +d'hote. pg3200.txt +d'ici pg3200.txt +d'if, pg3200.txt +d'italia pg3200.txt +d'un pg100.txt +d'unlap. pg3200.txt +d---- pg31100.txt +d---d pg3200.txt +d---d!" pg3200.txt +d---dest pg3200.txt +d---n. pg3200.txt +d--uncomfortable, pg31100.txt +d-r-a-w-l--baby!" pg3200.txt +d. pg3200.txt, pg100.txt +d.]} pg3200.txt +d.c.l., pg3200.txt +d.w. pg3200.txt +d.w.] pg3200.txt +dab pg31100.txt +dabbler; pg3200.txt +dacent pg3200.txt +dad. pg100.txt +daemon pg31100.txt +daffed, pg100.txt +daffodils, pg100.txt +daffy pg3200.txt +dagger pg100.txt +dagger! pg100.txt +dagger, pg100.txt +dagger. pg100.txt +dagger." pg3200.txt +dagger.] pg100.txt +dagger] pg100.txt +daggers pg3200.txt, pg100.txt +daggers! pg31100.txt +daggers, pg100.txt +daggett pg3200.txt +daguerreotype-case. pg3200.txt +dah, pg3200.txt +dah." pg3200.txt +dah?" pg3200.txt +dahlweiner. pg3200.txt +dailies pg3200.txt +dailies, pg3200.txt +dailies. pg3200.txt +daily pg31100.txt, pg3200.txt, pg100.txt +daily" pg3200.txt +daily, pg31100.txt, pg3200.txt, pg100.txt +daily. pg3200.txt +daily; pg3200.txt +daintier pg3200.txt, pg100.txt +daintiest pg3200.txt +daintily pg3200.txt +daintily, pg3200.txt +daintiness. pg3200.txt +dainty pg3200.txt +dainty, pg100.txt +dainty-wrought pg3200.txt +dairy pg3200.txt +daisy! pg3200.txt +daisy-cutter pg3200.txt +daisy. pg3200.txt, pg100.txt +daisy; pg3200.txt +dale pg31100.txt +dale, pg100.txt +dale. pg100.txt +dalin' pg3200.txt +dalliance pg100.txt +dalliance. pg100.txt +dallied pg100.txt +dally. pg100.txt +dallying pg3200.txt +dallying. pg100.txt +dalmatians pg100.txt +dalmatians, pg100.txt +dalrymple pg31100.txt +dalrymple," pg31100.txt +dalrymple. pg31100.txt +daly pg3200.txt +daly's pg3200.txt +daly, pg3200.txt +daly. pg3200.txt +daly." pg3200.txt +dam pg3200.txt, pg100.txt +dam! pg100.txt +dam'd pg3200.txt +dam, pg100.txt +dam-- pg3200.txt +dam. pg100.txt +dam." pg3200.txt +dam; pg100.txt +dam? pg100.txt +damage pg3200.txt +damage, pg3200.txt +damage--and pg3200.txt +damage. pg31100.txt, pg3200.txt, pg100.txt +damage.' pg3200.txt +damaged pg3200.txt +damaged, pg3200.txt +damaged; pg3200.txt +damages pg3200.txt, pg100.txt +damages. pg31100.txt, pg3200.txt, pg100.txt +damaging pg3200.txt +damaging. pg3200.txt +damascus pg3200.txt +damascus, pg3200.txt +damascus. pg3200.txt +damask pg100.txt +damask, pg3200.txt +damask. pg100.txt +dame pg100.txt +dame, pg3200.txt, pg100.txt +dame? pg100.txt +damen. pg3200.txt +dames pg100.txt +damiens' pg3200.txt +damm'd pg100.txt +damn pg3200.txt, pg100.txt +damn'd pg3200.txt, pg100.txt +damn'd, pg100.txt +damn'd. pg100.txt +damn." pg3200.txt +damnable. pg3200.txt, pg100.txt +damnation pg3200.txt, pg100.txt +damnation! 
pg3200.txt, pg100.txt +damnation, pg100.txt +damned pg3200.txt +damned. pg3200.txt +damned.... pg3200.txt +damning pg3200.txt +damning. pg3200.txt +damp pg31100.txt, pg3200.txt, pg100.txt +damp, pg3200.txt +damp. pg3200.txt +damp." pg31100.txt +damped pg3200.txt +dampened. pg3200.txt +damrell. pg3200.txt +damrosch pg3200.txt +damsel pg3200.txt +damsel. pg100.txt +damsels pg3200.txt +damsons pg100.txt +dan pg3200.txt +dan'l pg3200.txt +dan'l!" pg3200.txt +dan'l, pg3200.txt +dan'l. pg3200.txt +dan'l? pg3200.txt +dan'l?" pg3200.txt +dan, pg3200.txt +dan. pg3200.txt +dana pg3200.txt +dana's, pg3200.txt +dana. pg3200.txt +danc'd pg100.txt +dance pg31100.txt, pg3200.txt, pg100.txt +dance!" pg31100.txt +dance, pg31100.txt, pg3200.txt, pg100.txt +dance. pg31100.txt, pg3200.txt, pg100.txt +dance." pg31100.txt +dance; pg31100.txt, pg3200.txt, pg100.txt +dance? pg100.txt +dance?" pg31100.txt +dance] pg100.txt +dance]. pg100.txt +danced pg31100.txt, pg3200.txt +danced, pg31100.txt +danced--for pg31100.txt +danced. pg3200.txt +dancer! pg100.txt +dancer, pg3200.txt +dancer. pg3200.txt +dancer." pg31100.txt +dancers--entrance pg3200.txt +dances pg31100.txt, pg3200.txt +dances!" pg31100.txt +dances, pg31100.txt, pg100.txt +dancing pg31100.txt, pg3200.txt +dancing, pg31100.txt, pg3200.txt, pg100.txt +dancing,--not pg31100.txt +dancing-hall. pg3200.txt +dancing-schools pg100.txt +dancing. pg3200.txt +dancing; pg100.txt +dancing?" pg31100.txt +dancing?--was pg31100.txt +dandy, pg3200.txt +dane pg100.txt +dane, pg100.txt +dane. pg3200.txt, pg100.txt +danger pg31100.txt, pg3200.txt, pg100.txt +danger! pg31100.txt, pg3200.txt +danger, pg31100.txt, pg3200.txt, pg100.txt +danger-line. pg3200.txt +danger. pg31100.txt, pg3200.txt, pg100.txt +danger." pg31100.txt, pg3200.txt +danger; pg31100.txt, pg3200.txt, pg100.txt +danger?" pg31100.txt +danger?' pg3200.txt +dangerous pg31100.txt, pg3200.txt, pg100.txt +dangerous! pg100.txt +dangerous, pg31100.txt, pg3200.txt, pg100.txt +dangerous--since pg3200.txt +dangerous--why, pg3200.txt +dangerous. pg31100.txt, pg3200.txt, pg100.txt +dangerous." pg3200.txt +dangerous; pg31100.txt, pg3200.txt, pg100.txt +dangerous?" pg3200.txt +dangerously pg3200.txt +dangerously; pg100.txt +dangers pg3200.txt, pg100.txt +dangers, pg100.txt +dangers." pg31100.txt +dangersome pg3200.txt +dangersome,"--and pg3200.txt +dangle pg3200.txt +dangled pg3200.txt +dangling pg3200.txt +dangling. pg3200.txt +daniel pg3200.txt +daniel! pg100.txt +daniel, pg3200.txt +danish pg3200.txt, pg100.txt +dank pg3200.txt +dank! pg3200.txt +danvers pg31100.txt +dar'd pg100.txt +dar'st pg100.txt +dar'st, pg100.txt +dar'st. pg100.txt +darcy pg31100.txt +darcy! pg31100.txt +darcy!" pg31100.txt +darcy" pg31100.txt +darcy's pg31100.txt +darcy's." pg31100.txt +darcy's; pg31100.txt +darcy, pg31100.txt +darcy," pg31100.txt +darcy. pg31100.txt +darcy." pg31100.txt +darcy: pg31100.txt +darcy?" pg31100.txt +dardanelles, pg3200.txt +dardanius! pg100.txt +dare pg31100.txt, pg3200.txt, pg100.txt +dare! pg3200.txt +dare!" pg3200.txt +dare, pg100.txt +dare- pg100.txt +dare--" pg31100.txt, pg3200.txt +dare. pg3200.txt, pg100.txt +dare." pg31100.txt +dare; pg100.txt +dared pg31100.txt, pg3200.txt +dared, pg31100.txt +dared. pg100.txt +daren't!" pg3200.txt +dares pg3200.txt, pg100.txt +dares, pg100.txt +dares. pg100.txt +darest. pg100.txt +daring pg31100.txt, pg3200.txt +daring, pg3200.txt +daring. pg3200.txt +daring; pg3200.txt +darius, pg100.txt +darjeeling, pg3200.txt +darjeeling. 
pg3200.txt +dark pg31100.txt, pg3200.txt, pg100.txt +dark, pg31100.txt, pg3200.txt, pg100.txt +dark-complected--just pg3200.txt +dark. pg3200.txt, pg100.txt +dark." pg31100.txt, pg3200.txt +dark: pg3200.txt +dark; pg31100.txt, pg3200.txt, pg100.txt +dark? pg3200.txt, pg100.txt +dark] pg3200.txt +darken pg100.txt +darkened pg3200.txt +darkens, pg3200.txt +darker pg31100.txt +darker." pg31100.txt +darkest, pg3200.txt +darkling. pg100.txt +darkly pg3200.txt, pg100.txt +darkness pg3200.txt, pg100.txt +darkness, pg3200.txt, pg100.txt +darkness- pg100.txt +darkness--darkness pg3200.txt +darkness--that pg3200.txt +darkness. pg3200.txt, pg100.txt +darkness." pg31100.txt +darkness.' pg3200.txt +darkness; pg3200.txt +darky. pg3200.txt +darley pg3200.txt +darley's." pg3200.txt +darley: pg3200.txt +darling pg3200.txt +darling!" pg3200.txt +darling, pg3200.txt, pg100.txt +darling--" pg3200.txt +darling. pg3200.txt +darling; pg31100.txt +darlings pg3200.txt +darlings, pg3200.txt +darnach? pg3200.txt +dart pg3200.txt +darted pg31100.txt, pg3200.txt +darting pg3200.txt +darts pg3200.txt, pg100.txt +darts, pg100.txt +darwin pg3200.txt +darwin's pg3200.txt +darwin,.....898 pg3200.txt +darwin. pg3200.txt +das pg3200.txt +das'n't pg3200.txt +dash pg31100.txt, pg3200.txt +dash'd. pg100.txt +dash-dashed pg3200.txt +dash. pg100.txt +dashed pg3200.txt +dasher pg3200.txt +dashing pg3200.txt +dashwood pg31100.txt +dashwood's." pg31100.txt +dashwood, pg31100.txt +dashwood--it pg31100.txt +dashwood. pg31100.txt +dashwood; pg31100.txt +dashwood? pg31100.txt +dashwood?" pg31100.txt +dashwoods pg31100.txt +dashwoods, pg31100.txt +dashwoods; pg31100.txt +dasn't pg3200.txt +dasn't. pg3200.txt +dass pg3200.txt +dast pg3200.txt +dasture. pg3200.txt +dat pg3200.txt, pg100.txt +dat!" pg3200.txt +dat's pg3200.txt +dat, pg3200.txt +dat--" pg3200.txt +dat--dat pg3200.txt +dat. pg3200.txt, pg100.txt +dat." pg3200.txt +dat? pg100.txt +dat?" pg3200.txt +data pg3200.txt +datchet pg100.txt +date pg31100.txt, pg3200.txt, pg100.txt +date) pg3200.txt +date, pg3200.txt, pg100.txt +date--hot pg3200.txt +date. pg3200.txt, pg100.txt +date." pg3200.txt +date.] pg3200.txt +date: pg100.txt +date; pg3200.txt +dated pg31100.txt, pg3200.txt +dated. pg3200.txt +dates pg31100.txt, pg3200.txt, pg100.txt +dates, pg31100.txt, pg3200.txt +dating pg3200.txt +daub'ry pg100.txt +daubensee, pg3200.txt +daubing pg3200.txt +daubs?" pg3200.txt +daughter pg31100.txt, pg3200.txt, pg100.txt +daughter! pg31100.txt, pg100.txt +daughter!" pg3200.txt +daughter's pg31100.txt +daughter's." pg31100.txt +daughter's; pg31100.txt +daughter) pg31100.txt +daughter), pg100.txt +daughter, pg31100.txt, pg3200.txt, pg100.txt +daughter," pg31100.txt +daughter- pg100.txt +daughter-in-law pg31100.txt, pg100.txt +daughter-in-law, pg31100.txt +daughter-in-law. pg100.txt +daughter. pg31100.txt, pg3200.txt, pg100.txt +daughter." pg31100.txt +daughter: pg100.txt +daughter; pg31100.txt, pg3200.txt, pg100.txt +daughter? pg3200.txt, pg100.txt +daughter?" pg3200.txt +daughters pg31100.txt, pg3200.txt, pg100.txt +daughters! pg100.txt +daughters!' pg3200.txt +daughters' pg31100.txt +daughters', pg31100.txt +daughters, pg31100.txt, pg3200.txt, pg100.txt +daughters. pg31100.txt, pg3200.txt, pg100.txt +daughters." pg31100.txt +daughters; pg100.txt +daunted. pg100.txt +dauntless pg3200.txt +dauphin pg3200.txt, pg100.txt +dauphin! pg100.txt +dauphin!" pg3200.txt +dauphin's pg3200.txt +dauphin, pg3200.txt, pg100.txt +dauphin. pg100.txt +dauphin." pg3200.txt +dauphin: pg100.txt +dauphin? 
pg100.txt +dave pg3200.txt +dave, pg3200.txt +dave." pg3200.txt +davenport pg3200.txt +davidson, pg3200.txt +davis pg3200.txt +davis, pg3200.txt +davis; pg3200.txt +davouassoux, pg3200.txt +davy pg100.txt +davy! pg100.txt +davy. pg3200.txt, pg100.txt +davy; pg100.txt +daw. pg100.txt +dawdling pg31100.txt +dawlish." pg31100.txt +dawn pg3200.txt +dawn!" pg3200.txt +dawn, pg3200.txt, pg100.txt +dawn--less pg3200.txt +dawn-days pg3200.txt +dawn. pg3200.txt, pg100.txt +dawn." pg3200.txt +dawn; pg3200.txt +dawned pg3200.txt +dawned, pg3200.txt +dawned. pg31100.txt +dawning pg100.txt +daws! pg100.txt +daws, pg100.txt +dawson's, pg3200.txt +day pg31100.txt, pg3200.txt, pg100.txt +day! pg31100.txt, pg3200.txt, pg100.txt +day!" pg31100.txt, pg3200.txt +day!' pg100.txt +day!- pg100.txt +day!--and pg3200.txt +day!--never pg31100.txt +day's pg31100.txt, pg3200.txt, pg100.txt +day) pg3200.txt +day, pg31100.txt, pg3200.txt, pg100.txt +day," pg3200.txt +day,' pg31100.txt +day- pg100.txt +day-- pg3200.txt +day--" pg3200.txt +day--and pg3200.txt +day--clear pg3200.txt +day--faultlessly pg3200.txt +day--great pg3200.txt +day--it pg3200.txt +day--maybe pg3200.txt +day--morning pg3200.txt +day--people pg3200.txt +day--think pg3200.txt +day--two pg3200.txt +day--which pg31100.txt +day-before-yesterday pg3200.txt +day-before-yesterday. pg3200.txt +day-dreams,--they pg3200.txt +day-thoughts? pg3200.txt +day-time.' pg3200.txt +day. pg31100.txt, pg3200.txt, pg100.txt +day." pg31100.txt, pg3200.txt +day.' pg3200.txt, pg100.txt +day.) pg3200.txt +day.--i pg31100.txt +day: pg31100.txt, pg3200.txt, pg100.txt +day; pg31100.txt, pg3200.txt, pg100.txt +day? pg3200.txt, pg100.txt +day?" pg31100.txt, pg3200.txt +day?' pg3200.txt +daybreak pg3200.txt +daybreak--and pg3200.txt +daylight pg3200.txt, pg100.txt +daylight, pg3200.txt, pg100.txt +daylight. pg3200.txt, pg100.txt +daylight." pg3200.txt +daylight; pg3200.txt +days pg31100.txt, pg3200.txt, pg100.txt +days! pg3200.txt, pg100.txt +days!" pg3200.txt +days' pg31100.txt, pg3200.txt +days, pg31100.txt, pg3200.txt, pg100.txt +days," pg3200.txt +days,--raising pg3200.txt +days- pg100.txt +days-- pg3200.txt +days--and pg3200.txt +days--christmas pg3200.txt +days--for pg3200.txt +days--our pg3200.txt +days--then pg3200.txt +days-days pg3200.txt +days. pg31100.txt, pg3200.txt, pg100.txt +days." pg31100.txt, pg3200.txt +days.' pg3200.txt +days: pg3200.txt, pg100.txt +days; pg31100.txt, pg3200.txt, pg100.txt +days? pg3200.txt, pg100.txt +days?" pg3200.txt +daytime pg3200.txt +daytime!--anybody pg3200.txt +daytime, pg3200.txt +daytime. pg3200.txt +daytime." pg3200.txt +daytime; pg3200.txt +daytime?" pg3200.txt +daytimes. pg3200.txt +dayton pg3200.txt +dazed pg3200.txt +dazed, pg3200.txt +dazed; pg3200.txt +dazu pg3200.txt +dazzle. pg3200.txt +dazzling pg3200.txt +dazzling, pg3200.txt +de'bris pg3200.txt +deacon pg3200.txt +deacons pg3200.txt +dead pg31100.txt, pg3200.txt, pg100.txt +dead! pg3200.txt, pg100.txt +dead!" pg3200.txt +dead!' pg3200.txt +dead' pg3200.txt +dead, pg31100.txt, pg3200.txt, pg100.txt +dead,' pg100.txt +dead,--and pg3200.txt +dead- pg100.txt +dead--" pg3200.txt +dead--and pg3200.txt +dead--but pg3200.txt +dead--or pg3200.txt +dead--whereas pg3200.txt +dead-and-alive; pg3200.txt +dead-beats. pg3200.txt +dead-letter pg3200.txt +dead-level, pg3200.txt +dead-levels. pg3200.txt +dead-white pg3200.txt +dead. pg31100.txt, pg3200.txt, pg100.txt +dead." pg3200.txt +dead.' pg100.txt +dead: pg3200.txt, pg100.txt +dead:-- pg3200.txt +dead; pg3200.txt, pg100.txt +dead? 
pg3200.txt, pg100.txt +dead?" pg3200.txt +dead?- pg100.txt +deader pg3200.txt +deadest pg3200.txt +deadly pg3200.txt +deadly, pg100.txt +deadly. pg100.txt +deadly? pg100.txt +deaf pg3200.txt +deaf, pg100.txt +deaf. pg31100.txt, pg100.txt +deaf? pg100.txt +deafening pg3200.txt +deafening, pg3200.txt +deafening; pg3200.txt +deafness. pg100.txt +deal pg31100.txt, pg3200.txt, pg100.txt +deal, pg31100.txt, pg3200.txt, pg100.txt +deal. pg3200.txt +deal." pg31100.txt, pg3200.txt +deal.' pg3200.txt +deal: pg31100.txt, pg3200.txt +deal; pg31100.txt, pg3200.txt +deal?" pg31100.txt +dealer pg3200.txt +dealers--preparations pg3200.txt +dealing pg3200.txt +dealing! pg100.txt +dealing. pg100.txt +dealing? pg100.txt +dealings pg31100.txt, pg3200.txt +dealings, pg3200.txt +deals pg3200.txt +deals. pg3200.txt +dealt pg3200.txt +dealt, pg100.txt +dealt. pg3200.txt +dean pg3200.txt +dean-- pg3200.txt +deane, pg3200.txt +dear pg31100.txt, pg3200.txt, pg100.txt +dear! pg31100.txt, pg3200.txt, pg100.txt +dear!" pg3200.txt +dear!' pg100.txt +dear" pg3200.txt +dear, pg31100.txt, pg3200.txt, pg100.txt +dear," pg31100.txt +dear,' pg31100.txt +dear--" pg3200.txt +dear-a? pg100.txt +dear-me-suz, pg3200.txt +dear. pg31100.txt, pg3200.txt, pg100.txt +dear." pg31100.txt, pg3200.txt +dear.' pg3200.txt +dear: pg100.txt +dear; pg31100.txt, pg3200.txt, pg100.txt +dear? pg3200.txt, pg100.txt +dear?" pg31100.txt, pg3200.txt +dearer pg31100.txt, pg3200.txt, pg100.txt +dearer, pg100.txt +dearer. pg100.txt +dearer." pg31100.txt +dearest pg31100.txt, pg3200.txt, pg100.txt +dearest! pg3200.txt +dearest!" pg3200.txt +dearest; pg100.txt +dearly pg31100.txt, pg3200.txt, pg100.txt +dearly, pg100.txt +dearly. pg100.txt +dearly." pg31100.txt, pg3200.txt +dearly; pg3200.txt +dearly? pg100.txt +dears! pg3200.txt +dears; pg100.txt +dearth pg100.txt +dearth, pg100.txt +dearth. pg100.txt +death pg31100.txt, pg3200.txt, pg100.txt +death! pg3200.txt, pg100.txt +death!" pg3200.txt +death!' pg100.txt +death" pg3200.txt +death's pg100.txt +death's-bed-got pg100.txt +death's-head; pg100.txt +death's. pg100.txt +death) pg3200.txt +death), pg100.txt +death, pg31100.txt, pg3200.txt, pg100.txt +death- pg100.txt +death--" pg3200.txt +death--a pg31100.txt +death--and pg3200.txt +death--arrival pg3200.txt +death--none pg3200.txt +death--this pg3200.txt +death--uninvited. pg3200.txt +death-agony. pg3200.txt +death-on-the-pale- pg3200.txt +death-picture. pg3200.txt +death-rate pg3200.txt +death-rattle. pg3200.txt +death-room, pg3200.txt +death-sentinel pg3200.txt +death-shriek, pg3200.txt +death. pg31100.txt, pg3200.txt, pg100.txt +death." pg31100.txt, pg3200.txt +death.' pg3200.txt, pg100.txt +death.]--has pg3200.txt +death: pg3200.txt, pg100.txt +death; pg31100.txt, pg3200.txt, pg100.txt +death? pg3200.txt, pg100.txt +death?" pg31100.txt, pg3200.txt +deathbed. pg100.txt +deathbed; pg100.txt +deathless pg3200.txt +deathlike pg31100.txt +deaths pg3200.txt, pg100.txt +deaths, pg100.txt +deaths. pg100.txt +deaths: pg100.txt +deaths; pg100.txt +deaths? pg100.txt +debarred pg3200.txt +debase pg3200.txt, pg100.txt +debasing pg3200.txt +debate pg100.txt +debate, pg100.txt +debate. pg3200.txt, pg100.txt +debate." pg3200.txt +debated pg31100.txt, pg3200.txt +debated. pg100.txt +debatement, pg100.txt +debating pg31100.txt, pg100.txt +debauch'd, pg100.txt +debauch. pg3200.txt +debauchery pg3200.txt +debility, pg3200.txt +debility; pg100.txt +deborah, pg3200.txt +deborah. pg100.txt +debris. 
pg3200.txt +debt pg31100.txt, pg3200.txt, pg100.txt +debt, pg3200.txt, pg100.txt +debt--virginia pg3200.txt +debt. pg3200.txt, pg100.txt +debt: pg100.txt +debt; pg31100.txt, pg100.txt +debt?" pg3200.txt +debtless. pg3200.txt +debtor pg100.txt +debtor, pg100.txt +debtor. pg100.txt +debts pg31100.txt, pg3200.txt, pg100.txt +debts, pg31100.txt, pg100.txt +debts. pg3200.txt, pg100.txt +debts." pg31100.txt +debts.--alas! pg31100.txt +debts; pg31100.txt +debut pg3200.txt +debuts! pg3200.txt +dec'd pg3200.txt +dec. pg31100.txt +decade pg3200.txt +decadence pg3200.txt +decades pg3200.txt +decades! pg3200.txt +decades, pg3200.txt +decameron. pg3200.txt +decamping pg3200.txt +decay pg3200.txt, pg100.txt +decay! pg100.txt +decay'd. pg100.txt +decay, pg31100.txt, pg3200.txt, pg100.txt +decay- pg100.txt +decay--it pg3200.txt +decay--perpetuators pg3200.txt +decay. pg3200.txt, pg100.txt +decay." pg3200.txt +decay; pg3200.txt +decay?" pg3200.txt +decay?' pg3200.txt +decayed, pg100.txt +decayer pg100.txt +decaying pg3200.txt +decays pg100.txt +decays! pg100.txt +decays? pg100.txt +deceas'd pg100.txt +deceas'd, pg100.txt +deceas'd. pg100.txt +deceas'd; pg100.txt +decease pg31100.txt +decease, pg3200.txt, pg100.txt +decease- pg100.txt +decease. pg31100.txt, pg100.txt +decease: pg100.txt +deceased pg31100.txt, pg3200.txt +deceased's pg3200.txt +deceased) pg3200.txt +deceased), pg3200.txt +deceased, pg3200.txt +deceased-from pg3200.txt +deceased. pg3200.txt +deceit pg100.txt +deceit! pg31100.txt +deceit, pg100.txt +deceit. pg100.txt +deceit; pg100.txt +deceit? pg31100.txt, pg100.txt +deceitful pg31100.txt +deceitful, pg100.txt +deceitful?" pg31100.txt +deceits pg3200.txt +deceits? pg100.txt +deceiv'd pg100.txt +deceiv'd, pg100.txt +deceiv'd. pg100.txt +deceiv'd; pg100.txt +deceive pg31100.txt, pg3200.txt, pg100.txt +deceive, pg3200.txt, pg100.txt +deceive. pg3200.txt, pg100.txt +deceived pg31100.txt, pg3200.txt +deceived!--meeting pg31100.txt +deceived, pg31100.txt, pg3200.txt, pg100.txt +deceived. pg3200.txt, pg100.txt +deceived." pg31100.txt +deceived; pg31100.txt +deceives pg100.txt +deceives, pg100.txt +deceivest pg100.txt +deceiving pg31100.txt, pg3200.txt, pg100.txt +december pg3200.txt +december, pg3200.txt, pg100.txt +december- pg100.txt +december. pg3200.txt +december." pg3200.txt +decencies, pg3200.txt +decency pg31100.txt, pg3200.txt +decency, pg3200.txt +decency; pg3200.txt +decent pg31100.txt, pg3200.txt +decent. pg3200.txt +deception pg3200.txt +deceptions pg31100.txt +deceptions!--i pg3200.txt +deceptions. pg3200.txt +decerns pg100.txt +decide pg31100.txt, pg3200.txt, pg100.txt +decide. pg3200.txt +decide." pg3200.txt +decided pg31100.txt, pg3200.txt +decided, pg31100.txt, pg3200.txt +decided. pg3200.txt +decided." pg31100.txt +decided: pg3200.txt +decidedly pg31100.txt +decidedly, pg31100.txt +decidedly: pg3200.txt +decides pg100.txt +deciding pg31100.txt, pg3200.txt +deciding, pg31100.txt +decipher'd pg100.txt +decipherable; pg3200.txt +deciphered pg3200.txt +decision pg31100.txt, pg3200.txt, pg100.txt +decision, pg31100.txt, pg3200.txt +decision--and pg3200.txt +decision. pg3200.txt +decision." pg3200.txt +decision: pg3200.txt +decisions pg31100.txt, pg3200.txt +decisive pg3200.txt +decisive. pg31100.txt, pg3200.txt +decisively pg31100.txt +decius. pg100.txt +decius? pg3200.txt +deck pg3200.txt +deck! pg100.txt +deck!" pg3200.txt +deck, pg3200.txt, pg100.txt +deck-- pg3200.txt +deck-hand; pg3200.txt +deck-hands pg3200.txt +deck-house, pg3200.txt +deck-sweep. pg3200.txt +deck. 
pg3200.txt, pg100.txt +deck: pg3200.txt +deck? pg3200.txt +decked pg3200.txt +decks pg3200.txt +deckt, pg100.txt +declaration pg31100.txt, pg3200.txt +declaration, pg31100.txt, pg3200.txt +declaration. pg31100.txt +declaration; pg31100.txt +declarations pg31100.txt +declare pg31100.txt, pg3200.txt, pg100.txt +declare! pg31100.txt +declare, pg31100.txt, pg3200.txt, pg100.txt +declare--" pg31100.txt +declare. pg31100.txt +declare." pg31100.txt +declared pg31100.txt, pg3200.txt +declared, pg3200.txt +declared,* pg31100.txt +declared: pg3200.txt +declared; pg31100.txt +declares pg31100.txt, pg3200.txt +declaring pg31100.txt, pg3200.txt +declension, pg100.txt +declensions pg100.txt +declination pg3200.txt +decline pg31100.txt, pg3200.txt, pg100.txt +decline, pg3200.txt +decline. pg3200.txt, pg100.txt +declined pg31100.txt, pg3200.txt, pg100.txt +declined, pg31100.txt, pg3200.txt +declined," pg31100.txt +declined-- pg3200.txt +declined. pg31100.txt +declined; pg3200.txt, pg100.txt +declines, pg100.txt +declining pg31100.txt, pg3200.txt +declivities pg3200.txt +declivity pg3200.txt +declivity, pg3200.txt +decom. pg3200.txt +decomposed pg3200.txt +decomposer pg3200.txt +decomposer, pg3200.txt +decomposition pg3200.txt +decorated pg3200.txt +decorated, pg3200.txt +decorated. pg3200.txt +decorating pg3200.txt +decoration pg3200.txt +decoration, pg3200.txt +decoration. pg3200.txt +decorations pg3200.txt +decorations, pg3200.txt +decorations. pg3200.txt +decorative pg3200.txt +decorous pg3200.txt +decorously, pg3200.txt +decorum pg31100.txt, pg3200.txt +decorum, pg31100.txt, pg3200.txt, pg100.txt +decorum. pg100.txt +decorum." pg31100.txt +decorum."--emma pg31100.txt +decoyed pg3200.txt +decreas'd. pg100.txt +decrease pg100.txt +decrease, pg100.txt +decreasing. pg3200.txt +decree pg3200.txt, pg100.txt +decree, pg100.txt +decree. pg3200.txt, pg100.txt +decree." pg3200.txt +decree; pg100.txt +decree? pg100.txt +decreed pg3200.txt, pg100.txt +decreed, pg3200.txt, pg100.txt +decreed. pg3200.txt +decreed." pg3200.txt +decreeing pg3200.txt +decrees pg3200.txt, pg100.txt +decrees, pg100.txt +decrees. pg3200.txt, pg100.txt +decrees: pg100.txt +decrees; pg100.txt +decripitude. pg3200.txt +decrying pg3200.txt +dedans? pg3200.txt +dedicate pg100.txt +dedicated pg31100.txt, pg100.txt +dedicated: pg3200.txt +dedicates pg100.txt +dedication pg31100.txt, pg3200.txt, pg100.txt +deduce pg3200.txt +deducer, pg3200.txt +deducible pg3200.txt +deduction pg31100.txt +deduction. pg3200.txt +deductions. pg3200.txt +deductions?" pg3200.txt +deed pg31100.txt, pg3200.txt, pg100.txt +deed! pg100.txt +deed!' pg100.txt +deed's pg3200.txt, pg100.txt +deed). pg3200.txt +deed, pg3200.txt, pg100.txt +deed," pg3200.txt +deed- pg100.txt +deed--pity pg3200.txt +deed. pg3200.txt, pg100.txt +deed." pg3200.txt +deed: pg100.txt +deed; pg31100.txt, pg100.txt +deed? pg3200.txt, pg100.txt +deeds pg3200.txt, pg100.txt +deeds! pg100.txt +deeds, pg3200.txt, pg100.txt +deeds. pg3200.txt, pg100.txt +deeds; pg100.txt +deeds? pg100.txt +deef pg3200.txt +deem pg100.txt +deem. pg100.txt +deemed pg31100.txt +deemed, pg100.txt +deemed. pg100.txt +deep pg31100.txt, pg3200.txt, pg100.txt +deep! pg3200.txt +deep!' pg3200.txt +deep, pg31100.txt, pg3200.txt, pg100.txt +deep-toned pg3200.txt +deep. pg3200.txt, pg100.txt +deep." pg3200.txt +deep.") pg3200.txt +deep: pg3200.txt +deep; pg3200.txt +deep? pg100.txt +deepen pg3200.txt +deepened pg3200.txt +deepened, pg3200.txt +deepened. 
pg3200.txt +deepening pg3200.txt +deeper pg3200.txt +deeper, pg100.txt +deeper--a pg3200.txt +deeper. pg3200.txt +deepest pg31100.txt, pg3200.txt +deeply pg31100.txt, pg3200.txt +deeply, pg31100.txt, pg3200.txt, pg100.txt +deeply--me, pg3200.txt +deeply. pg31100.txt, pg100.txt +deeps pg3200.txt +deeps. pg3200.txt +deeps; pg100.txt +deepvow, pg100.txt +deer pg3200.txt, pg100.txt +deer! pg3200.txt +deer!" pg3200.txt +deer, pg3200.txt, pg100.txt +deer-steeling, pg3200.txt +deer. pg3200.txt, pg100.txt +deer? pg100.txt +deerslayer pg3200.txt +deesse? pg100.txt +deever, pg3200.txt +deever. pg3200.txt +defac'd pg100.txt +defac'd, pg100.txt +deface pg100.txt +deface, pg100.txt +defaced pg3200.txt, pg100.txt +default pg3200.txt, pg100.txt +default. pg100.txt +defaulter; pg3200.txt +defeat pg3200.txt, pg100.txt +defeat'st. pg100.txt +defeat, pg3200.txt, pg100.txt +defeat. pg3200.txt, pg100.txt +defeated pg3200.txt +defeated, pg3200.txt, pg100.txt +defeated. pg3200.txt +defeats, pg3200.txt +defect pg31100.txt, pg3200.txt, pg100.txt +defect, pg3200.txt, pg100.txt +defect--an pg3200.txt +defect. pg3200.txt, pg100.txt +defect: pg3200.txt +defection pg3200.txt +defective pg3200.txt +defective. pg3200.txt +defects pg31100.txt, pg3200.txt, pg100.txt +defects, pg100.txt +defects. pg31100.txt, pg3200.txt +defence pg31100.txt, pg100.txt +defence! pg100.txt +defence!- pg100.txt +defence, pg31100.txt, pg3200.txt, pg100.txt +defence. pg3200.txt, pg100.txt +defence.' pg100.txt +defence; pg31100.txt, pg100.txt +defence? pg100.txt +defences. pg100.txt +defend pg31100.txt, pg3200.txt, pg100.txt +defend, pg100.txt +defend. pg100.txt +defendant pg3200.txt +defendant, pg3200.txt, pg100.txt +defendant. pg3200.txt +defendant; pg100.txt +defendants pg3200.txt +defended pg100.txt +defending pg31100.txt, pg3200.txt, pg100.txt +defense pg3200.txt +defense, pg100.txt +defense. pg3200.txt +defense; pg3200.txt +defenses. pg3200.txt +defensible. pg100.txt +defer pg31100.txt, pg3200.txt +deference pg31100.txt, pg3200.txt +deference, pg3200.txt +deference-- pg3200.txt +deference. pg3200.txt +deference." pg31100.txt +deferential pg3200.txt +deferred pg3200.txt +defiance pg31100.txt, pg3200.txt, pg100.txt +defiance. pg31100.txt, pg3200.txt, pg100.txt +defiance; pg100.txt +defiant, pg3200.txt +deficiencies pg31100.txt, pg3200.txt +deficiencies, pg31100.txt +deficiency pg31100.txt +deficiency. pg31100.txt +deficient pg31100.txt +deficient. pg31100.txt +deficient." pg31100.txt +deficit." pg3200.txt +defied pg100.txt +defied, pg3200.txt, pg100.txt +defil'd pg100.txt +defil'd; pg100.txt +defile pg3200.txt +defile. pg100.txt +defile; pg100.txt +defiled pg3200.txt +defiled, pg3200.txt +defiler pg100.txt +defiling pg3200.txt +defiling; pg100.txt +define pg3200.txt +define, pg100.txt +defined pg31100.txt, pg3200.txt +defined, pg3200.txt +defined. pg3200.txt +defining pg3200.txt +definite pg3200.txt +definite, pg3200.txt +definite--nephi pg3200.txt +definite. pg3200.txt +definite." pg3200.txt +definitely pg3200.txt +definitely. pg3200.txt +definition pg3200.txt +definition. pg31100.txt +definition: pg3200.txt +definitive. pg100.txt +deflection?" pg3200.txt +deflower'd? pg100.txt +deform'd. pg100.txt +deform'd? pg100.txt +deformities pg3200.txt +deformities? pg100.txt +deformity, pg100.txt +deformity. pg100.txt +defrauded pg3200.txt +deft pg3200.txt +deftly pg3200.txt +deftness pg3200.txt +defy pg31100.txt, pg100.txt +defy, pg100.txt +defying pg100.txt +deg. pg3200.txt +deg., pg3200.txt +degenerate, pg100.txt +degenerate. 
pg100.txt +degenerate; pg100.txt +degeneration pg3200.txt +deggendorf." pg3200.txt +degradation pg3200.txt +degradation. pg31100.txt, pg3200.txt +degradation." pg31100.txt, pg3200.txt +degradation; pg3200.txt +degrade pg3200.txt +degraded pg3200.txt +degraded, pg3200.txt +degraded. pg3200.txt +degrades pg3200.txt +degrading pg3200.txt +degree pg31100.txt, pg3200.txt, pg100.txt +degree, pg31100.txt, pg3200.txt, pg100.txt +degree- pg100.txt +degree. pg31100.txt, pg3200.txt, pg100.txt +degree." pg3200.txt +degree.] pg3200.txt +degree; pg31100.txt, pg3200.txt, pg100.txt +degree? pg100.txt +degrees pg31100.txt, pg3200.txt, pg100.txt +degrees, pg3200.txt, pg100.txt +degrees--tough, pg3200.txt +degrees. pg100.txt +degrees." pg3200.txt +degrees? pg100.txt +dei pg3200.txt +deified. pg100.txt +deifying pg100.txt +deign pg3200.txt, pg100.txt +deiphobus pg100.txt +deiphobus, pg100.txt +deists, pg3200.txt +deity pg3200.txt, pg100.txt +deity! pg3200.txt +deity, pg3200.txt, pg100.txt +deity. pg3200.txt, pg100.txt +deity; pg3200.txt +deject. pg100.txt +dejected pg3200.txt +dejected. pg3200.txt +dejectedly pg3200.txt +dejectedly,-- pg3200.txt +dejectedly: pg3200.txt +dejection, pg31100.txt +dejection. pg31100.txt +dejection: pg3200.txt +delaford pg31100.txt +delaford!--but pg31100.txt +delaford. pg31100.txt +delagoa pg3200.txt +delamere, pg31100.txt +delaware, pg3200.txt +delay pg31100.txt, pg3200.txt, pg100.txt +delay'd, pg100.txt +delay'd. pg100.txt +delay, pg31100.txt, pg3200.txt, pg100.txt +delay. pg31100.txt, pg3200.txt, pg100.txt +delay." pg31100.txt, pg3200.txt +delay; pg3200.txt, pg100.txt +delay? pg100.txt +delayed pg31100.txt, pg3200.txt +delayed, pg3200.txt +delayed. pg31100.txt, pg3200.txt +delayed." pg3200.txt +delayed; pg31100.txt +delaying pg3200.txt +delays pg3200.txt +delays, pg31100.txt +delays. pg31100.txt, pg3200.txt, pg100.txt +delays; pg100.txt +delectable. pg100.txt +delegate pg3200.txt +delegate, pg3200.txt +delegate. pg3200.txt +delegates pg3200.txt +delegates, pg3200.txt +delegation pg3200.txt +delegation, pg3200.txt +delegation. pg3200.txt +deleted pg3200.txt +deleterious pg3200.txt +deletions pg31100.txt, pg3200.txt, pg100.txt +delhi pg3200.txt +delhi, pg3200.txt +deliberate pg3200.txt +deliberate, pg3200.txt, pg100.txt +deliberate: pg3200.txt +deliberately pg31100.txt, pg3200.txt +deliberately, pg31100.txt +deliberately--very pg3200.txt +deliberately. pg3200.txt +deliberateness pg3200.txt +deliberation-- pg31100.txt +deliberation: pg31100.txt, pg3200.txt +deliberations, pg3200.txt +deliberations. pg3200.txt +delibere). pg3200.txt +delibere): pg3200.txt +delicacies pg3200.txt +delicacies, pg3200.txt +delicacy pg31100.txt, pg3200.txt +delicacy!--and pg31100.txt +delicacy, pg31100.txt, pg3200.txt +delicacy--' pg3200.txt +delicacy--but pg31100.txt +delicacy. pg31100.txt, pg3200.txt +delicacy; pg3200.txt +delicate pg31100.txt, pg3200.txt, pg100.txt +delicate, pg3200.txt +delicate. pg100.txt +delicate." pg31100.txt, pg3200.txt +delicately pg31100.txt, pg3200.txt +delicates- pg100.txt +delicious pg3200.txt +delicious, pg3200.txt +delicious. pg3200.txt +deliciously pg3200.txt +deliciousness pg100.txt +delight pg31100.txt, pg3200.txt, pg100.txt +delight! pg31100.txt +delight" pg3200.txt +delight, pg31100.txt, pg3200.txt, pg100.txt +delight. pg31100.txt, pg3200.txt, pg100.txt +delight.--emma's pg31100.txt +delight: pg100.txt +delight; pg100.txt +delight? pg100.txt +delighted pg31100.txt, pg3200.txt +delighted! pg31100.txt +delighted, pg100.txt +delighted--i pg3200.txt +delighted. 
pg31100.txt, pg3200.txt, pg100.txt +delighted: pg31100.txt +delightful pg31100.txt, pg3200.txt +delightful!" pg31100.txt +delightful--and pg31100.txt +delightful. pg31100.txt, pg3200.txt +delightful; pg31100.txt +delightful? pg31100.txt +delightfulest pg3200.txt +delightfully pg3200.txt +delightfully," pg31100.txt +delightfully---i pg31100.txt +delightfulness--and pg3200.txt +delights pg31100.txt, pg3200.txt, pg100.txt +delights, pg100.txt +delights. pg100.txt +delineator pg3200.txt +delineator's pg3200.txt +delinquencies, pg3200.txt +delinquent pg3200.txt +delinquent. pg3200.txt +delirious pg3200.txt +delirious, pg3200.txt +deliriously pg3200.txt +delirium pg31100.txt, pg3200.txt +delirium. pg3200.txt +deliver pg31100.txt, pg3200.txt, pg100.txt +deliver! pg100.txt +deliver'd pg100.txt +deliver'd. pg100.txt +deliver'd? pg100.txt +deliver't? pg100.txt +deliver, pg100.txt +deliver. pg100.txt +deliver: pg100.txt +deliverable pg3200.txt +deliverance pg3200.txt, pg100.txt +deliverance! pg100.txt +deliverance, pg3200.txt +deliverance. pg31100.txt, pg100.txt +deliverance; pg100.txt +delivered pg31100.txt, pg3200.txt, pg100.txt +delivered, pg3200.txt, pg100.txt +delivered--and pg3200.txt +delivered. pg100.txt +delivered; pg3200.txt +delivered? pg100.txt +delivered?" pg3200.txt +deliverer pg3200.txt +deliverer. pg3200.txt +deliverers pg3200.txt +delivering pg3200.txt +delivers pg3200.txt, pg100.txt +delivery pg3200.txt +delivery, pg3200.txt +delivery. pg3200.txt, pg100.txt +delivery; pg3200.txt +delivery? pg100.txt +della pg3200.txt +dells, pg3200.txt +delmonico's pg3200.txt +delmonico's, pg3200.txt +delude pg3200.txt +delude." pg3200.txt +deluge pg3200.txt +deluge. pg3200.txt +deluged pg3200.txt +deluging pg3200.txt +delusion pg3200.txt +delusion, pg31100.txt +delusion--dreaded pg3200.txt +delusion. pg3200.txt +delusions." pg3200.txt +delusions.' pg3200.txt +delver! pg100.txt +dem pg3200.txt +dem. pg3200.txt +demand pg31100.txt, pg3200.txt, pg100.txt +demand, pg3200.txt, pg100.txt +demand. pg3200.txt, pg100.txt +demand: pg100.txt +demand; pg3200.txt +demand? pg100.txt +demanded pg31100.txt, pg3200.txt, pg100.txt +demanded, pg3200.txt, pg100.txt +demanded- pg100.txt +demanded. pg3200.txt, pg100.txt +demanding pg31100.txt +demands pg3200.txt, pg100.txt +demands, pg100.txt +demands. pg100.txt +demands; pg100.txt +deme pg3200.txt +demeanor pg3200.txt +demeanor. pg3200.txt +demeanour, pg100.txt +demented pg3200.txt +demerits pg100.txt +demerits. pg31100.txt +demetrius pg100.txt +demetrius! pg100.txt +demetrius'; pg100.txt +demetrius, pg100.txt +demetrius. pg100.txt +demetrius.' pg100.txt +demetrius; pg100.txt +demetrius? pg100.txt +demetrius] pg100.txt +demi-cannon. pg100.txt +demi-devil pg100.txt +demi-devil- pg100.txt +demi-god pg100.txt +demi-natur'd pg100.txt +demi-paradise, pg100.txt +demi-puppets pg100.txt +demigod; pg3200.txt +democracy pg3200.txt +democrat pg3200.txt +democrat!' pg3200.txt +democratic pg3200.txt +democrats pg3200.txt +democrats, pg3200.txt +democrats; pg3200.txt +demoiselle pg3200.txt +demon's pg3200.txt +demon." pg3200.txt +demon: pg3200.txt +demons." pg3200.txt +demonstrable pg3200.txt +demonstrable, pg3200.txt +demonstrate pg3200.txt, pg100.txt +demonstrate--that pg3200.txt +demonstrated pg3200.txt, pg100.txt +demonstrating pg3200.txt +demonstration pg3200.txt, pg100.txt +demonstration, pg3200.txt +demonstration. pg3200.txt +demonstration; pg3200.txt +demonstrations pg3200.txt +demonstrations," pg3200.txt +demonstrations? 
pg3200.txt +demonstrative; pg100.txt +demoralized pg3200.txt +demosthenes pg3200.txt +demosthenes, pg3200.txt +demure pg31100.txt, pg100.txt +demure, pg31100.txt +demure. pg31100.txt +demurely, pg100.txt +demurenesses pg3200.txt +den pg3200.txt +den, pg3200.txt, pg100.txt +den--" pg3200.txt +den--he pg3200.txt +den. pg3200.txt, pg100.txt +den." pg3200.txt +den.' pg100.txt +den?" pg3200.txt +denial pg31100.txt, pg3200.txt +denial. pg31100.txt, pg100.txt +denial; pg100.txt +denials pg100.txt +denied pg31100.txt, pg3200.txt, pg100.txt +denied, pg100.txt +denied--and pg31100.txt +denied. pg100.txt +denied; pg31100.txt, pg100.txt +denied? pg100.txt +denier, pg100.txt +denier. pg100.txt +denies pg3200.txt, pg100.txt +deniliquin pg3200.txt +denis pg3200.txt +denis. pg3200.txt +denis." pg3200.txt +denis?" pg3200.txt +denmark pg100.txt +denmark, pg3200.txt, pg100.txt +denmark. pg100.txt +denmark? pg100.txt +dennis pg100.txt +dennis! pg100.txt +denny pg100.txt +denny, pg31100.txt +denny? pg100.txt +denomination, pg31100.txt +denomination." pg3200.txt +denomination?" pg3200.txt +denominations, pg3200.txt +denote pg100.txt +denote, pg31100.txt, pg100.txt +denoted pg31100.txt +denotement pg100.txt +denotes pg3200.txt +denoting pg31100.txt +denoument pg3200.txt +denounce pg3200.txt +denounced pg3200.txt +denouncing pg3200.txt +dens pg3200.txt +dens, pg100.txt +dense pg3200.txt +dense, pg3200.txt +dense. pg3200.txt +densely pg3200.txt +densely, pg3200.txt +dent pg3200.txt +dental pg3200.txt +dentistry, pg3200.txt +dentistry. pg3200.txt +dents pg3200.txt +denuded pg3200.txt +denunciation. pg3200.txt +denver pg3200.txt +denver. pg3200.txt +deny pg31100.txt, pg3200.txt, pg100.txt +deny, pg100.txt +deny--would pg3200.txt +deny. pg31100.txt, pg3200.txt, pg100.txt +deny? pg100.txt +denying pg31100.txt, pg3200.txt, pg100.txt +depart pg3200.txt, pg100.txt +depart! pg3200.txt, pg100.txt +depart, pg3200.txt, pg100.txt +depart. pg3200.txt, pg100.txt +depart." pg3200.txt +depart; pg100.txt +depart] pg100.txt +departed pg3200.txt, pg100.txt +departed's.' pg3200.txt +departed, pg100.txt +departed. pg3200.txt +departed; pg3200.txt +departest, pg100.txt +departing pg3200.txt +departing. pg100.txt +department pg3200.txt +department, pg3200.txt +department. pg3200.txt +departments, pg3200.txt +departments. pg3200.txt +departs pg3200.txt +departure pg31100.txt, pg3200.txt, pg100.txt +departure, pg31100.txt, pg3200.txt, pg100.txt +departure. pg31100.txt, pg3200.txt, pg100.txt +departure; pg31100.txt +departure?" pg3200.txt +departures, pg31100.txt +depend pg31100.txt, pg3200.txt, pg100.txt +depend, pg100.txt +depend. pg31100.txt, pg100.txt +depend; pg100.txt +dependant. pg100.txt +dependants, pg100.txt +depended pg31100.txt, pg3200.txt +depended, pg31100.txt, pg3200.txt +depended. pg31100.txt, pg100.txt +depended; pg31100.txt +dependence pg31100.txt, pg100.txt +dependence! pg31100.txt +dependence." pg31100.txt +dependence:--"surely, pg31100.txt +dependence; pg31100.txt +dependency pg100.txt +dependent pg31100.txt, pg3200.txt +dependent, pg31100.txt +dependents. pg3200.txt +dependents? pg3200.txt +depending pg3200.txt, pg100.txt +depending, pg100.txt +depends pg31100.txt, pg3200.txt, pg100.txt +depends. pg100.txt +depends." pg31100.txt +depends: pg100.txt +depew pg3200.txt +depew!" pg3200.txt +depew] pg3200.txt +dephlogisticated pg3200.txt +deplorable. pg31100.txt +deplore pg3200.txt +deplore, pg31100.txt +deplore. pg100.txt +deployed pg3200.txt +depopulated pg3200.txt +deportment pg31100.txt, pg3200.txt +deportment? 
pg3200.txt +depos'd pg100.txt +depos'd, pg100.txt +depos'd. pg100.txt +depos'd: pg100.txt +depos'd; pg100.txt +depos'd? pg100.txt +depose pg100.txt +depose, pg100.txt +depose; pg100.txt +deposed. pg3200.txt +deposed? pg100.txt +deposit pg31100.txt +deposit, pg3200.txt +deposit. pg3200.txt +deposit." pg3200.txt +depositaries; pg100.txt +deposited pg31100.txt, pg3200.txt +depositings pg3200.txt +deposition pg3200.txt +deposition. pg3200.txt +depository pg3200.txt +deposits pg3200.txt +depot pg3200.txt +depot! pg3200.txt +depots, pg3200.txt +depraves? pg100.txt +depravities pg3200.txt +depravity: pg3200.txt +deprecated, pg31100.txt +deprecatingly: pg3200.txt +depreciation pg3200.txt +depreciation, pg3200.txt +depredations." pg3200.txt +depress. pg31100.txt, pg3200.txt +depressed pg3200.txt +depressed. pg31100.txt, pg3200.txt +depressed; pg3200.txt +depressing pg3200.txt +depressing, pg3200.txt +depressing. pg3200.txt +depression pg3200.txt +depression, pg3200.txt +depression; pg3200.txt +depressions pg3200.txt +depressions, pg3200.txt +deprevetur. pg3200.txt +deprivation pg3200.txt +deprivation. pg3200.txt +deprived pg31100.txt, pg3200.txt +depth pg3200.txt, pg100.txt +depths pg3200.txt +depths, pg3200.txt +depths. pg3200.txt +deputation pg100.txt +depute pg100.txt +deputies pg3200.txt +deputies. pg3200.txt +deputy pg100.txt +deputy), pg3200.txt +deputy, pg100.txt +deputy. pg100.txt +deputy? pg100.txt +der pg3200.txt +deracinate, pg100.txt +derby pg100.txt +derby, pg100.txt +derby. pg100.txt +derby? pg100.txt +derbyshire, pg31100.txt +derbyshire. pg31100.txt +derbyshire." pg31100.txt +derbyshire?" pg31100.txt +dercetas pg100.txt +dercetas; pg100.txt +dere pg100.txt +derelictions. pg3200.txt +deride, pg3200.txt +derided. pg3200.txt +derides. pg100.txt +deriding, pg3200.txt +derision pg31100.txt, pg3200.txt, pg100.txt +derision, pg3200.txt +derision-- pg3200.txt +derision. pg3200.txt +derision? pg100.txt +derisive pg3200.txt +deriv'd pg100.txt +deriv'd, pg100.txt +deriv'd. pg100.txt +deriv'd; pg100.txt +deriv'd? pg100.txt +derivative pg31100.txt, pg3200.txt, pg100.txt +derive pg31100.txt, pg3200.txt, pg100.txt +derive, pg100.txt +derive. pg100.txt +derive: pg100.txt +derived pg31100.txt, pg3200.txt, pg100.txt +derives pg100.txt +dern pg3200.txt +dern' pg3200.txt +derogate. pg100.txt +derogation pg100.txt +derrick, pg3200.txt +derrick. pg3200.txt +derriere, pg3200.txt +dervish pg3200.txt +dervish, pg3200.txt +dervish. pg3200.txt +dervishes pg3200.txt +des pg3200.txt +descant; pg100.txt +descend pg3200.txt, pg100.txt +descend, pg3200.txt, pg100.txt +descend. pg100.txt +descend; pg100.txt +descendant pg3200.txt +descendant!" pg3200.txt +descendant. pg31100.txt +descendants pg3200.txt +descendants. pg3200.txt +descended pg31100.txt, pg3200.txt, pg100.txt +descended, pg3200.txt, pg100.txt +descended. pg3200.txt +descended? pg100.txt +descendest, pg3200.txt +descending pg31100.txt, pg3200.txt +descending. pg3200.txt +descends pg3200.txt +descends. pg100.txt +descends] pg100.txt +descent pg31100.txt, pg3200.txt, pg100.txt +descent, pg3200.txt, pg100.txt +descent- pg100.txt +descent. pg31100.txt, pg3200.txt, pg100.txt +descent: pg100.txt +descent; pg100.txt +descent? pg100.txt +descent?" pg3200.txt +descents pg100.txt +describable pg3200.txt +describe pg31100.txt, pg3200.txt +describe. pg31100.txt, pg3200.txt +describe." pg31100.txt +describe; pg3200.txt +describe;' pg3200.txt +described pg31100.txt, pg3200.txt, pg100.txt +described! pg31100.txt +described, pg3200.txt +described. 
pg3200.txt +described." pg3200.txt +described? pg31100.txt +describes pg3200.txt +describes, pg100.txt +describing pg31100.txt, pg3200.txt +describing. pg3200.txt +describing?" pg3200.txt +descried pg31100.txt +descried. pg100.txt +descried; pg100.txt +description pg31100.txt, pg3200.txt, pg100.txt +description, pg31100.txt, pg3200.txt +description- pg100.txt +description-- pg3200.txt +description. pg31100.txt, pg3200.txt, pg100.txt +description." pg31100.txt, pg3200.txt +description.--julia pg31100.txt +description: pg3200.txt +descriptions pg31100.txt, pg3200.txt +descriptions. pg3200.txt +descriptions; pg31100.txt, pg3200.txt +descriptive pg31100.txt, pg3200.txt +descriptive. pg3200.txt +descriptive?" pg3200.txt +descry pg100.txt +descry. pg100.txt +desdemona pg100.txt +desdemona, pg100.txt +desdemona- pg100.txt +desdemona. pg100.txt +desdemona; pg100.txt +desdemona? pg100.txt +desdemonas, pg3200.txt +dese pg3200.txt +desecrate pg3200.txt +desecrated pg3200.txt +desecration pg3200.txt +desecration? pg3200.txt +desert pg3200.txt, pg100.txt +desert! pg100.txt +desert's pg3200.txt +desert, pg3200.txt, pg100.txt +desert- pg100.txt +desert--forty pg3200.txt +desert. pg3200.txt, pg100.txt +desert; pg3200.txt +desert? pg100.txt +desert?" pg3200.txt +deserted pg31100.txt, pg3200.txt +deserted!" pg3200.txt +deserted, pg3200.txt +deserted--a pg3200.txt +deserted. pg3200.txt +deserter pg3200.txt +deserter." pg3200.txt +deserting pg3200.txt +desertion pg3200.txt +desertion, pg31100.txt +deserts pg3200.txt, pg100.txt +deserts, pg31100.txt, pg100.txt +deserts. pg3200.txt, pg100.txt +deserts; pg100.txt +deserts? pg100.txt +deserv'd pg100.txt +deserv'd. pg100.txt +deserv'd; pg100.txt +deserv'st pg100.txt +deserve pg31100.txt, pg3200.txt, pg100.txt +deserve! pg31100.txt +deserve, pg31100.txt, pg100.txt +deserve-- pg100.txt +deserve. pg3200.txt, pg100.txt +deserve." pg31100.txt +deserve; pg3200.txt, pg100.txt +deserve?" pg31100.txt +deserved pg31100.txt, pg3200.txt +deserved, pg31100.txt, pg3200.txt, pg100.txt +deserved--" pg3200.txt +deserved. pg31100.txt, pg3200.txt +deserved; pg3200.txt +deserver pg100.txt +deserves pg31100.txt, pg3200.txt, pg100.txt +deserves. pg3200.txt +deserves." pg31100.txt +deserves.' pg100.txt +deservest pg100.txt +deserving pg31100.txt, pg3200.txt, pg100.txt +deserving, pg31100.txt +deserving. pg100.txt +deserving; pg3200.txt +deserving? pg100.txt +deservings! pg100.txt +deservings, pg100.txt +design pg31100.txt, pg3200.txt, pg100.txt +design'd, pg100.txt +design, pg100.txt +design," pg31100.txt +design--nature's pg3200.txt +design--to pg31100.txt +design. pg31100.txt, pg100.txt +design; pg31100.txt +designed pg31100.txt, pg3200.txt +designers pg3200.txt +designments pg100.txt +designs pg31100.txt, pg3200.txt, pg100.txt +designs! pg100.txt +designs, pg3200.txt, pg100.txt +designs. pg31100.txt, pg3200.txt, pg100.txt +designs?" pg3200.txt +desir'd pg100.txt +desir'd, pg100.txt +desir'd. pg100.txt +desir'd; pg100.txt +desir'st. pg100.txt +desirable pg31100.txt, pg3200.txt +desirable! pg31100.txt, pg3200.txt +desirable, pg3200.txt +desirable. pg31100.txt, pg3200.txt +desirable." pg31100.txt +desirable: pg31100.txt +desirableness pg31100.txt +desire pg31100.txt, pg3200.txt, pg100.txt +desire! pg100.txt +desire, pg31100.txt, pg3200.txt, pg100.txt +desire. pg31100.txt, pg3200.txt, pg100.txt +desire." pg31100.txt, pg3200.txt +desire.' pg100.txt +desire.'" pg3200.txt +desire: pg100.txt +desire; pg31100.txt, pg3200.txt, pg100.txt +desire? pg100.txt +desire?" 
pg31100.txt, pg3200.txt +desired pg31100.txt, pg3200.txt +desired, pg100.txt +desired. pg31100.txt, pg3200.txt +desired: pg31100.txt +desired; pg31100.txt +desires pg31100.txt, pg3200.txt, pg100.txt +desires, pg3200.txt, pg100.txt +desires- pg100.txt +desires--and pg3200.txt +desires. pg3200.txt, pg100.txt +desires; pg100.txt +desireth pg3200.txt +desiring pg31100.txt, pg3200.txt +desirous pg31100.txt, pg3200.txt, pg100.txt +desist pg3200.txt, pg100.txt +desisted. pg3200.txt +desk pg3200.txt, pg100.txt +desk, pg3200.txt +desk-banging, pg3200.txt +desk. pg3200.txt +desk? pg100.txt +desks pg3200.txt +desks, pg3200.txt +desolate pg31100.txt, pg3200.txt +desolate!" pg3200.txt +desolate, pg3200.txt +desolate. pg3200.txt, pg100.txt +desolating pg3200.txt +desolation pg3200.txt, pg100.txt +desolation! pg3200.txt +desolation, pg3200.txt +desolation. pg3200.txt, pg100.txt +desolation." pg3200.txt +desolation? pg100.txt +despair pg31100.txt, pg3200.txt, pg100.txt +despair! pg100.txt +despair, pg3200.txt, pg100.txt +despair-- pg3200.txt +despair. pg31100.txt, pg3200.txt, pg100.txt +despair." pg31100.txt, pg3200.txt +despair: pg100.txt +despair; pg31100.txt, pg100.txt +despair? pg100.txt +despaired pg31100.txt +despairing pg3200.txt +despairing: pg3200.txt +despairingly: pg3200.txt +despairs, pg100.txt +despatch, pg31100.txt, pg3200.txt +despatch. pg3200.txt +despatch: pg3200.txt +despatched pg31100.txt +despatches pg3200.txt +despatching pg3200.txt +desperado pg3200.txt +desperado. pg3200.txt +desperadoes pg3200.txt +desperadoes, pg3200.txt +desperate pg31100.txt, pg3200.txt, pg100.txt +desperate, pg3200.txt +desperate,' pg3200.txt +desperate-- pg3200.txt +desperate. pg3200.txt, pg100.txt +desperate." pg31100.txt, pg3200.txt +desperate.--money pg3200.txt +desperate; pg100.txt +desperately pg3200.txt, pg100.txt +desperately, pg3200.txt +desperation pg100.txt +desperation, pg100.txt +despicable pg3200.txt +despicable." pg31100.txt +despis'd! pg100.txt +despis'd? pg100.txt +despise pg31100.txt, pg3200.txt, pg100.txt +despise, pg100.txt +despise. pg100.txt +despise." pg3200.txt +despise; pg3200.txt +despise? pg31100.txt +despised pg31100.txt, pg3200.txt, pg100.txt +despised, pg31100.txt, pg100.txt +despised. pg3200.txt +despised; pg100.txt +despises pg3200.txt +despising pg100.txt +despising, pg100.txt +despite pg3200.txt, pg100.txt +despite, pg100.txt +despite. pg100.txt +despite; pg100.txt +despite? pg100.txt +despoiled pg3200.txt +despoilers pg3200.txt +despondence, pg31100.txt +despondence; pg31100.txt +despondency. pg3200.txt +despondency; pg3200.txt +despondent pg3200.txt +despondent, pg3200.txt +despondently pg3200.txt +despondingly, pg3200.txt +despotisms pg3200.txt +dessert pg31100.txt +dessert, pg3200.txt +dessert. pg31100.txt +desserts, pg3200.txt +destination pg3200.txt +destination! pg31100.txt +destination. pg3200.txt +destination.' pg3200.txt +destination; pg31100.txt +destined pg3200.txt +destinies pg100.txt +destiny pg31100.txt, pg3200.txt, pg100.txt +destiny, pg31100.txt, pg100.txt +destiny. pg31100.txt, pg100.txt +destiny? pg100.txt +destitute pg3200.txt +destitute, pg31100.txt +destitute; pg3200.txt +destroy pg31100.txt, pg3200.txt, pg100.txt +destroy'd pg100.txt +destroy'd. pg100.txt +destroy'd? pg100.txt +destroy, pg31100.txt, pg100.txt +destroy-- pg3200.txt +destroy. pg100.txt +destroy? pg100.txt +destroyed pg31100.txt, pg3200.txt +destroyed, pg3200.txt +destroyed--because pg3200.txt +destroyed. pg31100.txt, pg3200.txt +destroyed." 
pg3200.txt +destroyer pg3200.txt +destroyer, pg3200.txt +destroying pg3200.txt +destruction pg3200.txt, pg100.txt +destruction! pg100.txt +destruction, pg3200.txt, pg100.txt +destruction--the pg3200.txt +destruction. pg3200.txt, pg100.txt +destruction.] pg3200.txt +destruction; pg3200.txt +destructive pg31100.txt, pg3200.txt +destructive. pg3200.txt +desuetude. pg3200.txt +detach pg3200.txt +detached pg31100.txt, pg3200.txt +detached, pg3200.txt +detaching pg31100.txt, pg3200.txt +detachment pg3200.txt +detachment. pg3200.txt +detachments pg3200.txt +detail pg31100.txt, pg3200.txt +detail!" pg3200.txt +detail's pg3200.txt +detail, pg3200.txt +detail-- pg3200.txt +detail--saw pg3200.txt +detail--the pg3200.txt +detail. pg3200.txt +detail." pg3200.txt +detail: pg3200.txt +detail; pg31100.txt, pg3200.txt +detail?" pg3200.txt +detailed pg3200.txt +detailing pg31100.txt, pg3200.txt +details pg31100.txt, pg3200.txt +details!" pg3200.txt +details, pg3200.txt +details--but pg3200.txt +details--though pg3200.txt +details. pg3200.txt +details." pg3200.txt +details; pg3200.txt +detain pg3200.txt, pg100.txt +detain, pg100.txt +detained pg31100.txt, pg3200.txt +detaining pg31100.txt +detect pg3200.txt, pg100.txt +detect. pg3200.txt +detected pg31100.txt, pg3200.txt +detected. pg31100.txt, pg3200.txt +detecting pg3200.txt +detection pg3200.txt +detection, pg3200.txt +detective pg3200.txt +detective! pg3200.txt +detective. pg3200.txt +detective." pg3200.txt +detectives pg3200.txt +detector! pg100.txt +detects" pg3200.txt +detention, pg3200.txt +deter pg31100.txt +deteriorated pg3200.txt +deteriorated, pg3200.txt +deteriorates pg3200.txt +determin'd pg100.txt +determin'd. pg100.txt +determinable pg3200.txt +determinate pg100.txt +determinate. pg100.txt +determination pg31100.txt, pg3200.txt, pg100.txt +determination! pg100.txt +determination, pg31100.txt, pg3200.txt, pg100.txt +determination. pg3200.txt +determination: pg3200.txt +determination? pg100.txt +determinations, pg100.txt +determine pg31100.txt, pg3200.txt, pg100.txt +determine, pg100.txt +determine. pg31100.txt, pg100.txt +determine." pg31100.txt +determined pg31100.txt, pg3200.txt, pg100.txt +determined, pg31100.txt, pg3200.txt +determined. pg31100.txt, pg3200.txt +determined." pg31100.txt +determined; pg31100.txt +determinedly pg3200.txt +determines pg100.txt +determines, pg100.txt +determining pg31100.txt, pg3200.txt +deterred pg3200.txt +deterrent. pg3200.txt +detest pg3200.txt +detest. pg100.txt +detest: pg31100.txt +detestable pg31100.txt, pg3200.txt +detested pg31100.txt, pg3200.txt +detested- pg100.txt +detriment pg31100.txt +deucalion; pg100.txt +deuce-ace pg100.txt +deukalion. pg3200.txt +deum'; pg100.txt +deutsche pg3200.txt +deutschen!' pg3200.txt +deux pg3200.txt +devant pg100.txt +devastated pg3200.txt +devastates pg3200.txt +devastating pg3200.txt +devastation, pg3200.txt +devastation. pg3200.txt +develop pg3200.txt +develop--there pg3200.txt +develop. pg3200.txt +developed pg3200.txt +developing pg3200.txt +development pg31100.txt, pg3200.txt +development. pg3200.txt +development; pg3200.txt +developments pg3200.txt +developments. pg3200.txt +device pg3200.txt, pg100.txt +device, pg100.txt +device- pg100.txt +device. pg3200.txt, pg100.txt +device; pg100.txt +device? pg100.txt +devices pg31100.txt, pg3200.txt +devices, pg100.txt +devices-- pg3200.txt +devices. pg3200.txt, pg100.txt +devil pg31100.txt, pg3200.txt, pg100.txt +devil! pg3200.txt, pg100.txt +devil!" pg3200.txt +devil!' 
pg100.txt +devil's pg3200.txt, pg100.txt +devil, pg3200.txt, pg100.txt +devil- pg100.txt +devil--he pg3200.txt +devil--i pg3200.txt +devil-monk, pg100.txt +devil. pg3200.txt, pg100.txt +devil." pg3200.txt +devil.' pg100.txt +devil: pg100.txt +devil; pg100.txt +devil? pg100.txt +devil?" pg3200.txt +deviled pg3200.txt +devilish pg3200.txt +devilish. pg100.txt +devilment pg3200.txt +devilment, pg3200.txt +devils pg3200.txt +devils! pg100.txt +devils!" pg3200.txt +devils, pg3200.txt, pg100.txt +devils. pg100.txt +devils? pg3200.txt +devis'd pg100.txt +devis'd, pg100.txt +devis'd? pg100.txt +devise pg100.txt +devise. pg100.txt +devise.' pg100.txt +devise: pg100.txt +devise? pg100.txt +devised pg3200.txt +devised, pg100.txt +devised. pg31100.txt, pg3200.txt +devised; pg3200.txt +devises; pg100.txt +devising, pg3200.txt +devoid pg31100.txt +devolved pg3200.txt +devonshire pg31100.txt +devonshire, pg31100.txt, pg100.txt +devonshire. pg31100.txt +devonshire." pg3200.txt +devote pg3200.txt +devoted pg31100.txt, pg3200.txt, pg100.txt +devoted, pg31100.txt, pg3200.txt +devotedly; pg3200.txt +devotee pg3200.txt +devotees pg3200.txt +devotes pg3200.txt +devoting pg31100.txt +devotion pg31100.txt, pg3200.txt, pg100.txt +devotion! pg100.txt +devotion, pg3200.txt +devotion- pg100.txt +devotion. pg31100.txt, pg3200.txt, pg100.txt +devotion? pg3200.txt +devotions pg31100.txt +devotions, pg3200.txt +devotions--but pg3200.txt +devour pg3200.txt, pg100.txt +devour'd pg100.txt +devour'd. pg100.txt +devoured pg3200.txt +devouring pg3200.txt +devouring. pg100.txt +devours pg100.txt +devout pg3200.txt, pg100.txt +devout, pg100.txt +devoutly pg3200.txt +devoutly. pg3200.txt, pg100.txt +devylles pg3200.txt +dew pg3200.txt, pg100.txt +dew! pg100.txt +dew, pg3200.txt, pg100.txt +dew-fashioned, pg3200.txt +dew-laden pg3200.txt +dew. pg100.txt +dew; pg100.txt +dewberries, pg100.txt +dewdrops pg3200.txt +dewdrops, pg3200.txt +dewlap, pg3200.txt +dexter, pg3200.txt +dexterity pg31100.txt, pg100.txt +dexterous pg3200.txt +dey pg3200.txt +dey'd pg3200.txt +dey's pg3200.txt +deyselves, pg3200.txt +dhu, pg3200.txt +di'monds pg3200.txt +di'monds! pg3200.txt +di'monds!" pg3200.txt +di'monds, pg3200.txt +di'monds. pg3200.txt +di'monds." pg3200.txt +diable! pg100.txt +diadem pg100.txt +diadem, pg100.txt +diadem. pg100.txt +diadem; pg100.txt +diagnosis. pg3200.txt +diagonal pg3200.txt +diagrams. pg3200.txt +dial pg100.txt +dialect pg3200.txt, pg100.txt +dialect; pg3200.txt +dialects. pg3200.txt +dialogue pg31100.txt +dialogue, pg31100.txt +dialogue. pg3200.txt, pg100.txt +diameter, pg3200.txt, pg100.txt +diameter. pg3200.txt +diamond pg3200.txt, pg100.txt +diamond" pg3200.txt +diamond-like pg3200.txt +diamonds pg31100.txt, pg3200.txt +diamonds! pg100.txt +diamonds, pg3200.txt +diamonds- pg100.txt +diamonds. pg31100.txt, pg3200.txt +diamonds." pg3200.txt +dian pg100.txt +diana pg3200.txt, pg100.txt +diana, pg100.txt +diana. pg100.txt +diaper, pg100.txt +diaries,--and pg3200.txt +diarist pg3200.txt +diary pg3200.txt +diary" pg3200.txt +diary, pg3200.txt +diary-- pg3200.txt +diary. pg3200.txt +diary: pg3200.txt +dice pg100.txt +dice! pg100.txt +dice; pg100.txt +dich pg3200.txt +dich, pg3200.txt +dich-- pg3200.txt +dich. pg3200.txt +dick pg31100.txt, pg3200.txt +dick, pg31100.txt, pg3200.txt, pg100.txt +dickens pg3200.txt +dickens's pg3200.txt +dickens. pg3200.txt +dickering pg3200.txt +dickinson's pg3200.txt +dickinson, pg3200.txt +dicta pg3200.txt +dictate, pg31100.txt +dictated pg3200.txt +dictated, pg31100.txt +dictated. 
pg3200.txt +dictating pg3200.txt +dictating. pg3200.txt +dictation, pg3200.txt +dictations. pg3200.txt +dictator's pg3200.txt +dictator, pg100.txt +dictatorship pg3200.txt +dictionary pg3200.txt +dictionary, pg3200.txt +dictionary. pg3200.txt +dictionary." pg3200.txt +dictynna? pg100.txt +did! pg31100.txt, pg3200.txt, pg100.txt +did!" pg3200.txt +did, pg31100.txt, pg3200.txt, pg100.txt +did- pg100.txt +did--" pg3200.txt +did--because pg3200.txt +did--but pg31100.txt, pg3200.txt +did--our pg3200.txt +did. pg31100.txt, pg3200.txt, pg100.txt +did." pg31100.txt, pg3200.txt +did.' pg3200.txt +did: pg3200.txt, pg100.txt +did; pg31100.txt, pg3200.txt, pg100.txt +did? pg31100.txt, pg3200.txt, pg100.txt +did?" pg3200.txt +did?' pg3200.txt +didn' pg3200.txt +didn't pg3200.txt +didn't!" pg3200.txt +didn't, pg3200.txt +didn't. pg3200.txt +didn't." pg3200.txt +didn't; pg3200.txt +didn't? pg3200.txt +didn't?" pg3200.txt +dido! pg100.txt +dido. pg100.txt +didst pg100.txt +didst, pg100.txt +didst. pg100.txt +die pg31100.txt, pg3200.txt, pg100.txt +die! pg3200.txt, pg100.txt +die!" pg3200.txt +die!' pg3200.txt +die, pg31100.txt, pg3200.txt, pg100.txt +die- pg100.txt +die--it pg3200.txt +die. pg3200.txt, pg100.txt +die." pg3200.txt +die.' pg3200.txt, pg100.txt +die: pg3200.txt, pg100.txt +die; pg31100.txt, pg3200.txt, pg100.txt +die? pg3200.txt, pg100.txt +die?" pg3200.txt +died pg31100.txt, pg3200.txt, pg100.txt +died!' pg3200.txt +died) pg31100.txt +died, pg31100.txt, pg3200.txt, pg100.txt +died- pg100.txt +died--. pg31100.txt +died. pg31100.txt, pg3200.txt, pg100.txt +died." pg31100.txt, pg3200.txt +died: pg3200.txt +died; pg3200.txt, pg100.txt +died? pg100.txt +died?" pg31100.txt, pg3200.txt +dieing pg31100.txt +dieing!" pg31100.txt +dien'? pg3200.txt +dienst pg3200.txt +dies pg3200.txt, pg100.txt +dies! pg100.txt +dies, pg3200.txt, pg100.txt +dies. pg3200.txt, pg100.txt +dies." pg31100.txt +dies.--he pg31100.txt +dies] pg100.txt +diese pg3200.txt +diest pg100.txt +diest! pg100.txt +diest. pg100.txt +diest; pg100.txt +diet pg100.txt +diet, pg31100.txt, pg3200.txt, pg100.txt +diet. pg3200.txt, pg100.txt +dieted pg100.txt +dieter. pg100.txt +diets pg3200.txt +dieu! pg100.txt +dieu, pg3200.txt, pg100.txt +differ pg3200.txt +differ, pg3200.txt, pg100.txt +differ. pg3200.txt, pg100.txt +differed pg31100.txt, pg3200.txt +differed; pg31100.txt +difference pg31100.txt, pg3200.txt, pg100.txt +difference, pg3200.txt, pg100.txt +difference. pg31100.txt, pg3200.txt, pg100.txt +difference." pg31100.txt, pg3200.txt +difference; pg31100.txt, pg3200.txt +difference? pg3200.txt, pg100.txt +difference?" pg3200.txt +difference?) pg3200.txt +differences pg3200.txt +differences?" pg3200.txt +different pg31100.txt, pg3200.txt +different! pg31100.txt +different, pg31100.txt, pg3200.txt, pg100.txt +different. pg31100.txt, pg3200.txt, pg100.txt +different." pg31100.txt, pg3200.txt +different; pg31100.txt, pg3200.txt +different? pg3200.txt +different?' pg3200.txt +differentiate pg3200.txt +differentiated pg3200.txt +differentiates pg3200.txt +differently pg31100.txt, pg3200.txt +differently, pg31100.txt +differently. pg31100.txt, pg3200.txt +differently." pg31100.txt, pg3200.txt +differently; pg31100.txt +differing pg31100.txt, pg3200.txt +differs pg3200.txt +difficult pg31100.txt, pg3200.txt +difficult, pg3200.txt +difficult. pg3200.txt +difficult." pg3200.txt +difficult; pg31100.txt +difficult?" pg3200.txt +difficulties pg31100.txt, pg3200.txt, pg100.txt +difficulties, pg31100.txt, pg3200.txt, pg100.txt +difficulties. 
pg31100.txt, pg3200.txt +difficulties?" pg31100.txt +difficulty pg31100.txt, pg3200.txt +difficulty, pg31100.txt, pg3200.txt +difficulty. pg3200.txt +difficulty." pg3200.txt +difficulty.) pg3200.txt +difficulty: pg3200.txt +difficulty; pg31100.txt, pg3200.txt +difficulty?" pg31100.txt +diffidence pg31100.txt, pg3200.txt +diffidence, pg31100.txt +diffidence-- pg3200.txt +diffidence. pg3200.txt, pg100.txt +diffidence: pg3200.txt +diffidence; pg100.txt +diffident pg31100.txt, pg3200.txt +diffident, pg3200.txt +diffidently-- pg3200.txt +diffuse, pg3200.txt +diffuse." pg31100.txt +diffused pg31100.txt +diffused, pg31100.txt +diffuseness pg31100.txt +dig pg3200.txt +dig!" pg3200.txt +dig, pg3200.txt +dig; pg100.txt +digest pg100.txt +digested pg3200.txt +digested, pg100.txt +digested; pg3200.txt +digestion. pg3200.txt +digestions; pg100.txt +digg'd pg100.txt +digger pg3200.txt +digger. pg3200.txt +digging pg3200.txt +digging. pg3200.txt +digging." pg3200.txt +diggings pg3200.txt +diggings" pg3200.txt +diggings. pg3200.txt +dignified pg3200.txt +dignified, pg31100.txt, pg3200.txt +dignified. pg100.txt +dignified: pg3200.txt, pg100.txt +dignifies pg3200.txt +dignitaries pg3200.txt +dignitary pg3200.txt +dignitary, pg3200.txt +dignities pg3200.txt, pg100.txt +dignities, pg3200.txt, pg100.txt +dignities. pg3200.txt, pg100.txt +dignities; pg100.txt +dignity pg31100.txt, pg3200.txt, pg100.txt +dignity! pg3200.txt +dignity!" pg3200.txt +dignity, pg31100.txt, pg3200.txt, pg100.txt +dignity-- pg3200.txt +dignity--taking pg3200.txt +dignity. pg31100.txt, pg3200.txt, pg100.txt +dignity." pg3200.txt +dignity.' pg3200.txt +dignity: pg3200.txt, pg100.txt +dignity; pg3200.txt, pg100.txt +dignity? pg3200.txt, pg100.txt +digress, pg100.txt +digress. pg3200.txt +digression pg31100.txt, pg3200.txt +dijon, pg3200.txt +dikes pg3200.txt +dilapidated pg3200.txt +dilate, pg100.txt +dilatory, pg31100.txt +dilemma. pg100.txt +dilettantenaufdringlichkeiten. pg3200.txt +diligence pg31100.txt, pg3200.txt, pg100.txt +diligence! pg100.txt +diligence, pg3200.txt, pg100.txt +diligence-time pg3200.txt +diligence. pg3200.txt, pg100.txt +diligences. pg3200.txt +diligent pg3200.txt +diligent, pg3200.txt, pg100.txt +diligent. pg100.txt +diligently pg3200.txt +diligently, pg3200.txt +diligently. pg3200.txt +dilkins, pg3200.txt +dilkoosha. pg3200.txt +dilsberg pg3200.txt +dilsberg. pg3200.txt +dilsberg] pg3200.txt +dilworthy pg3200.txt +dilworthy's. pg3200.txt +dilworthy, pg3200.txt +dilworthy. pg3200.txt +dilworthy: pg3200.txt +dilworthys." pg3200.txt +dim pg31100.txt, pg3200.txt, pg100.txt +dim, pg3200.txt +dim. pg3200.txt, pg100.txt +dim: pg3200.txt +dime pg3200.txt +dime! pg3200.txt +dimensions pg31100.txt, pg3200.txt +dimensions. pg3200.txt +diminish pg3200.txt, pg100.txt +diminish, pg3200.txt, pg100.txt +diminished pg31100.txt, pg3200.txt +diminished, pg3200.txt +diminished. pg3200.txt +diminishes, pg3200.txt +diminishing pg3200.txt +diminishing, pg3200.txt, pg100.txt +diminishing. pg3200.txt +diminushed--that pg31100.txt +diminushing. pg31100.txt +diminution pg100.txt +diminutive pg3200.txt, pg100.txt +diminutives; pg3200.txt +dimly pg31100.txt, pg3200.txt +dimmed pg3200.txt +dimmed, pg100.txt +dimming pg3200.txt +dimness, pg3200.txt +dimpled, pg3200.txt +dimsdale's pg3200.txt +din pg3200.txt, pg100.txt +din'd pg100.txt +din'd, pg100.txt +din'd. pg100.txt +din'd; pg100.txt +din, pg3200.txt +din---- pg3200.txt +din. pg3200.txt, pg100.txt +din: pg3200.txt +din? 
pg100.txt +dinadan pg3200.txt +dinapore, pg3200.txt +dine pg31100.txt, pg3200.txt, pg100.txt +dine' pg3200.txt +dine, pg31100.txt, pg100.txt +dine." pg31100.txt +dined pg31100.txt, pg3200.txt +dined, pg31100.txt, pg3200.txt +dined- pg100.txt +diner. pg100.txt +diners pg3200.txt +diners. pg3200.txt +ding-dong.] pg100.txt +ding. pg100.txt +dingo--a pg3200.txt +dingo. pg3200.txt +dingy pg3200.txt +dingy, pg3200.txt +dining pg31100.txt, pg3200.txt +dining, pg3200.txt +dining-chambers. pg100.txt +dining-houses. pg3200.txt +dining-parlour pg31100.txt +dining-parlour. pg31100.txt +dining-room pg3200.txt +dining-room, pg31100.txt, pg3200.txt +dining-room--hated pg31100.txt +dining. pg3200.txt +dining." pg31100.txt +dinner pg31100.txt, pg3200.txt, pg100.txt +dinner! pg100.txt +dinner!"--why pg3200.txt +dinner, pg31100.txt, pg3200.txt, pg100.txt +dinner- pg3200.txt +dinner-bell pg31100.txt +dinner-gong pg3200.txt +dinner-parties pg31100.txt +dinner-party, pg31100.txt +dinner-party. pg31100.txt +dinner-table, pg31100.txt +dinner-time, pg100.txt +dinner-time. pg31100.txt, pg100.txt +dinner-time; pg100.txt +dinner. pg31100.txt, pg3200.txt, pg100.txt +dinner." pg31100.txt, pg3200.txt +dinner.' pg100.txt +dinner.--to pg31100.txt +dinner: pg3200.txt +dinner; pg31100.txt, pg3200.txt, pg100.txt +dinner? pg100.txt +dinner?" pg3200.txt +dinnerhorn, pg3200.txt +dinnerless. pg3200.txt +dinners pg31100.txt, pg3200.txt +dinners, pg31100.txt, pg3200.txt, pg100.txt +dinners--at pg3200.txt +dinners. pg3200.txt +dinnertime? pg100.txt +dint pg31100.txt, pg3200.txt +dio. pg3200.txt +diomed pg100.txt +diomed! pg100.txt +diomed, pg100.txt +diomed. pg100.txt +diomede pg100.txt +diomede. pg3200.txt +diomedes pg100.txt +diomedes, pg100.txt +diomedes. pg100.txt +dion pg100.txt +dion, pg100.txt +dip pg3200.txt +diphtheria. pg3200.txt +diploma, pg3200.txt +diploma. pg3200.txt +diplomacy pg3200.txt +diplomacy, pg3200.txt +diplomacy--' pg3200.txt +diplomacy--those pg3200.txt +diplomacy. pg3200.txt +diplomas pg3200.txt +diplomat, pg3200.txt +diplomatic pg3200.txt +dipped pg3200.txt +dipper pg3200.txt +dipperful pg3200.txt +dips pg3200.txt +dips, pg3200.txt +diramic pg3200.txt +dire pg3200.txt +direct pg31100.txt, pg3200.txt, pg100.txt +direct, pg100.txt +direct. pg100.txt +direct." pg31100.txt +directed pg31100.txt, pg3200.txt, pg100.txt +directed, pg3200.txt, pg100.txt +directed. pg100.txt +directing pg31100.txt +directing, pg100.txt +direction pg31100.txt, pg3200.txt, pg100.txt +direction, pg31100.txt, pg3200.txt, pg100.txt +direction-giver, pg100.txt +direction. pg31100.txt, pg3200.txt, pg100.txt +direction." pg31100.txt +direction: pg3200.txt +direction; pg31100.txt, pg3200.txt +direction?" pg3200.txt +directions pg31100.txt, pg3200.txt +directions, pg31100.txt, pg3200.txt +directions--everywhere pg3200.txt +directions. pg31100.txt, pg3200.txt, pg100.txt +directions; pg3200.txt +directions? pg100.txt +directions?" pg3200.txt +directitude. pg100.txt +directly pg31100.txt, pg3200.txt, pg100.txt +directly!" pg31100.txt +directly, pg31100.txt +directly. pg31100.txt, pg3200.txt, pg100.txt +directly." pg31100.txt, pg3200.txt +directly; pg31100.txt +directly;" pg31100.txt +directness-- pg3200.txt +director pg31100.txt, pg3200.txt, pg100.txt +director." pg3200.txt +director?' pg3200.txt +directors pg3200.txt +directors"--which pg3200.txt +directors, pg3200.txt +directors. pg3200.txt +directors." pg3200.txt +directors; pg3200.txt +directory pg3200.txt +direful. 
pg100.txt +dirigible--but pg3200.txt +dirk pg3200.txt +dirt pg31100.txt, pg3200.txt, pg100.txt +dirt! pg3200.txt +dirt, pg3200.txt, pg100.txt +dirt--everything. pg3200.txt +dirt-rotten pg100.txt +dirt. pg100.txt +dirt." pg31100.txt, pg3200.txt +dirt; pg3200.txt +dirty pg31100.txt, pg3200.txt +dirty, pg3200.txt +dirty. pg3200.txt +dirty." pg31100.txt, pg3200.txt +dis pg3200.txt +dis!" pg3200.txt +dis---- pg3200.txt +dis. pg3200.txt +disability pg3200.txt +disability; pg100.txt +disabled pg3200.txt, pg100.txt +disabled, pg3200.txt +disadvantage pg31100.txt, pg3200.txt +disadvantage. pg3200.txt +disadvantage: pg3200.txt +disadvantage? pg31100.txt +disadvantages--in pg3200.txt +disaffected pg3200.txt +disagreable pg31100.txt +disagree pg31100.txt +disagree, pg100.txt +disagree." pg31100.txt +disagreeable pg31100.txt, pg3200.txt +disagreeable, pg31100.txt +disagreeable," pg3200.txt +disagreeable--i pg31100.txt +disagreeable. pg31100.txt, pg3200.txt +disagreeable." pg31100.txt +disagreeable?" pg31100.txt +disagreeableness pg31100.txt +disagreeably, pg3200.txt +disagreed pg3200.txt +disagreement pg31100.txt +disagreement, pg31100.txt +disagreement. pg3200.txt +disagreement; pg3200.txt +disallow pg31100.txt +disallow, pg31100.txt +disannul, pg100.txt +disapp'intment." pg3200.txt +disappear pg3200.txt +disappear, pg3200.txt +disappear. pg3200.txt +disappearance pg3200.txt +disappearance. pg31100.txt +disappeared pg31100.txt, pg3200.txt +disappeared, pg31100.txt, pg3200.txt +disappeared. pg31100.txt, pg3200.txt +disappeared." pg3200.txt +disappeared.' pg3200.txt +disappeared; pg3200.txt +disappeared? pg3200.txt +disappearing pg3200.txt +disappearing. pg31100.txt, pg3200.txt +disappears pg3200.txt +disappears. pg3200.txt +disappears." pg3200.txt +disappears.' pg3200.txt +disappointed pg31100.txt, pg3200.txt +disappointed, pg31100.txt, pg3200.txt +disappointed. pg31100.txt, pg3200.txt +disappointed." pg31100.txt, pg3200.txt +disappointed; pg31100.txt, pg3200.txt +disappointed] pg3200.txt +disappointing, pg3200.txt +disappointing. pg3200.txt +disappointment pg31100.txt, pg3200.txt +disappointment! pg31100.txt +disappointment!" pg31100.txt, pg3200.txt +disappointment, pg31100.txt, pg3200.txt +disappointment-- pg3200.txt +disappointment--bitter." pg3200.txt +disappointment--for pg3200.txt +disappointment. pg31100.txt, pg3200.txt +disappointment." pg31100.txt, pg3200.txt +disappointment.--the pg31100.txt +disappointment: pg31100.txt, pg3200.txt +disappointment; pg31100.txt +disappointment?" pg31100.txt +disappointments pg31100.txt +disappointments, pg31100.txt +disappointments; pg3200.txt +disappoints pg3200.txt +disapprobation, pg31100.txt +disapprobation. pg31100.txt +disapproval pg3200.txt +disapproval--" pg3200.txt +disapproval; pg3200.txt +disapproval?" pg3200.txt +disapprove pg31100.txt, pg3200.txt +disapproved pg31100.txt, pg3200.txt +disapproved, pg3200.txt +disapproved. pg31100.txt +disapproving pg3200.txt +disapproving, pg31100.txt +disarm. pg3200.txt +disarmed pg31100.txt +disarmed. pg100.txt +disaster pg31100.txt, pg3200.txt +disaster! pg3200.txt +disaster!" pg3200.txt +disaster, pg3200.txt +disaster--my pg3200.txt +disaster. pg3200.txt, pg100.txt +disasters pg3200.txt, pg100.txt +disasters, pg3200.txt +disasters: pg3200.txt +disastrous pg31100.txt, pg3200.txt +disastrous. pg3200.txt +disband. 
pg3200.txt +disbelief pg31100.txt, pg3200.txt +disbelieve pg3200.txt +disbelieving pg31100.txt +disbranch pg100.txt +disburdened pg3200.txt +discard pg3200.txt +discard, pg3200.txt +discarded pg3200.txt +discarded. pg3200.txt +discern pg31100.txt, pg3200.txt, pg100.txt +discern, pg100.txt +discern. pg31100.txt +discerned pg31100.txt, pg3200.txt +discerned, pg3200.txt +discerned. pg3200.txt +discerner pg100.txt +discerning pg31100.txt, pg100.txt +discerning;--what pg31100.txt +discernings pg100.txt +discernment pg31100.txt +discharg'd! pg100.txt +discharg'd, pg100.txt +discharg'd. pg100.txt +discharg'd] pg100.txt +discharge pg31100.txt, pg3200.txt, pg100.txt +discharge. pg100.txt +discharge; pg100.txt +discharged pg31100.txt, pg3200.txt, pg100.txt +discharged, pg31100.txt +discharged. pg31100.txt, pg3200.txt +discharges: pg3200.txt +discharging pg31100.txt, pg3200.txt +disciple pg3200.txt +disciple, pg3200.txt +disciples pg3200.txt +disciples. pg3200.txt +disciples; pg3200.txt +discipleship--and pg3200.txt +discipline pg31100.txt, pg3200.txt, pg100.txt +discipline! pg100.txt +discipline, pg3200.txt, pg100.txt +discipline. pg100.txt +disciplines pg100.txt +disciplines, pg100.txt +disclaim pg31100.txt, pg3200.txt, pg100.txt +disclaim'st pg100.txt +disclaimers pg100.txt +disclaiming pg31100.txt, pg3200.txt +disclos'd, pg100.txt +disclos'd. pg100.txt +disclos'd; pg100.txt +disclose pg3200.txt, pg100.txt +disclosed pg3200.txt +disclosed, pg100.txt +discloses: pg100.txt +disclosing pg31100.txt, pg3200.txt +disclosure pg31100.txt +disclosure; pg31100.txt +discolor pg3200.txt +discolored pg3200.txt +discomfit pg100.txt +discomfited pg31100.txt +discomfited. pg100.txt +discomfited; pg100.txt +discomfiture: pg100.txt +discomfort pg3200.txt +discomfort, pg3200.txt +discomfort. pg3200.txt, pg100.txt +discomfort; pg3200.txt +discomforted pg3200.txt +discomforting pg3200.txt +discommoded pg3200.txt +discommoded. pg3200.txt +discompose pg31100.txt, pg3200.txt +discomposed pg31100.txt +disconcerted pg3200.txt +disconcerted; pg31100.txt +disconnect pg3200.txt +disconnected pg3200.txt +disconsolate, pg100.txt +disconsolate. pg3200.txt +disconsolating pg3200.txt +discontent pg31100.txt, pg100.txt +discontent, pg100.txt +discontent. pg31100.txt, pg3200.txt, pg100.txt +discontent." pg3200.txt +discontent; pg100.txt +discontent? pg100.txt +discontented pg3200.txt +discontented, pg31100.txt +discontented. pg3200.txt, pg100.txt +discontented?" pg3200.txt +discontents pg100.txt +discontents, pg100.txt +discontents. pg100.txt +discontents." pg31100.txt +discontinuance. pg31100.txt +discontinued, pg3200.txt +discontinued. pg3200.txt +discontinuing pg31100.txt +discord? pg100.txt +discordant pg3200.txt +discount pg3200.txt +discount. pg3200.txt +discount." pg3200.txt +discourage pg3200.txt +discouraged pg3200.txt +discouraged, pg31100.txt, pg3200.txt +discouraged. pg3200.txt +discouraged; pg3200.txt +discouragement, pg31100.txt +discouragements pg3200.txt +discouragements. pg3200.txt +discouraging, pg3200.txt +discouraging. pg3200.txt +discourse pg31100.txt, pg3200.txt, pg100.txt +discourse, pg31100.txt, pg100.txt +discourse. pg31100.txt, pg3200.txt, pg100.txt +discourse: pg100.txt +discourse? pg100.txt +discourses pg3200.txt +discourteously pg3200.txt +discourtesy pg3200.txt, pg100.txt +discover pg31100.txt, pg3200.txt, pg100.txt +discover', pg3200.txt +discover'd pg100.txt +discover'd, pg100.txt +discover, pg31100.txt, pg3200.txt, pg100.txt +discover. 
pg3200.txt, pg100.txt +discoverable pg3200.txt +discovered pg31100.txt, pg3200.txt, pg100.txt +discovered! pg100.txt +discovered, pg3200.txt +discovered--fear. pg3200.txt +discovered. pg3200.txt, pg100.txt +discovered." pg3200.txt +discovered; pg3200.txt +discovered? pg31100.txt +discoverer pg3200.txt +discoverers pg3200.txt +discoveries pg31100.txt, pg3200.txt, pg100.txt +discoveries! pg100.txt +discoveries!" pg3200.txt +discoveries. pg3200.txt +discoveries." pg31100.txt +discovering pg31100.txt, pg3200.txt +discovers pg3200.txt, pg100.txt +discovers, pg100.txt +discovery pg31100.txt, pg3200.txt, pg100.txt +discovery, pg31100.txt, pg3200.txt, pg100.txt +discovery-voyage pg3200.txt +discovery. pg31100.txt, pg3200.txt, pg100.txt +discovery; pg3200.txt +discredit pg31100.txt, pg3200.txt +discredited pg100.txt +discredits. pg100.txt +discreet, pg100.txt +discreet. pg3200.txt +discreet; pg100.txt +discreet? pg31100.txt +discrepancy. pg3200.txt +discrepantly." pg3200.txt +discretion pg31100.txt, pg3200.txt, pg100.txt +discretion, pg31100.txt, pg100.txt +discretion. pg31100.txt, pg3200.txt, pg100.txt +discretion; pg31100.txt, pg100.txt +discretion? pg100.txt +discriminate pg3200.txt +discriminates. pg3200.txt +discriminating pg3200.txt +discrimination pg3200.txt +discrimination, pg3200.txt +discrimination. pg3200.txt +discrimination; pg31100.txt +discuss pg31100.txt, pg3200.txt +discuss, pg3200.txt +discuss. pg100.txt +discuss." pg31100.txt, pg3200.txt +discussed pg31100.txt, pg3200.txt +discussed, pg31100.txt, pg3200.txt +discussed; pg31100.txt +discussing. pg3200.txt +discussion pg31100.txt, pg3200.txt +discussion, pg3200.txt +discussion-mortar. pg3200.txt +discussion. pg31100.txt, pg3200.txt +discussion." pg3200.txt +discussion.] pg3200.txt +discussion; pg31100.txt, pg3200.txt +disdain pg31100.txt, pg100.txt +disdain'd pg100.txt +disdain'd; pg100.txt +disdain, pg31100.txt, pg100.txt +disdain. pg31100.txt, pg100.txt +disdain: pg3200.txt, pg100.txt +disdain; pg100.txt +disdaineth, pg100.txt +disdainful pg100.txt +disdainful, pg100.txt +disdainful. pg100.txt +disdains pg100.txt +disdains. pg100.txt +diseas'd pg100.txt +disease pg3200.txt, pg100.txt +disease, pg3200.txt, pg100.txt +disease- pg100.txt +disease. pg3200.txt, pg100.txt +disease." pg3200.txt +disease; pg3200.txt, pg100.txt +disease?" pg3200.txt +diseased pg3200.txt +diseased, pg3200.txt, pg100.txt +diseases pg3200.txt, pg100.txt +diseases, pg3200.txt +diseases. pg100.txt +disedg'd pg100.txt +disembark pg100.txt +disembarrass pg3200.txt +disembowled, pg3200.txt +disenchant pg3200.txt +disengaged pg31100.txt +disengaging pg31100.txt +disfavour; pg31100.txt +disfigure pg100.txt +disfigured pg31100.txt, pg3200.txt +disfigured; pg100.txt +disfigurement. pg3200.txt +disfiguring pg3200.txt +disgorge pg100.txt +disgorged pg3200.txt +disgrac'd pg100.txt +disgrac'd, pg100.txt +disgrac'd. pg100.txt +disgrace pg31100.txt, pg3200.txt, pg100.txt +disgrace! pg100.txt +disgrace, pg31100.txt, pg100.txt +disgrace--" pg3200.txt +disgrace. pg3200.txt, pg100.txt +disgrace." pg3200.txt +disgrace: pg100.txt +disgrace?" pg31100.txt +disgraced pg31100.txt, pg3200.txt +disgraced, pg100.txt +disgraced. pg3200.txt +disgraced." pg31100.txt +disgraceful pg31100.txt, pg3200.txt +disgraceful! pg3200.txt +disgraces pg100.txt +disgraces, pg100.txt +disgracing pg31100.txt, pg3200.txt +disguis'd! pg100.txt +disguis'd, pg100.txt +disguis'd. pg100.txt +disguis'd? pg100.txt +disguise pg3200.txt, pg100.txt +disguise!" 
pg3200.txt +disguise, pg3200.txt, pg100.txt +disguise-- pg3200.txt +disguise. pg3200.txt +disguise." pg31100.txt, pg3200.txt +disguise; pg3200.txt +disguised pg31100.txt, pg3200.txt, pg100.txt +disguised, pg31100.txt, pg3200.txt, pg100.txt +disguised; pg100.txt +disguises pg3200.txt +disguises? pg100.txt +disguising pg3200.txt +disgust pg31100.txt, pg3200.txt +disgust. pg3200.txt +disgust." pg31100.txt +disgusted pg31100.txt +disgusted. pg3200.txt +disgusting pg31100.txt, pg3200.txt +disgusting!" pg31100.txt +disgusting. pg31100.txt, pg3200.txt +dish pg3200.txt, pg100.txt +dish! pg100.txt +dish'd pg100.txt +dish, pg3200.txt, pg100.txt +dish-- pg3200.txt +dish-water, pg3200.txt +dish. pg3200.txt, pg100.txt +dishclout pg100.txt +dishearten pg100.txt +disheartened, pg3200.txt +disheartening pg3200.txt +disheartens pg100.txt +dishes pg31100.txt, pg3200.txt +dishes! pg100.txt +dishes, pg3200.txt, pg100.txt +dishes. pg3200.txt, pg100.txt +dishonest pg3200.txt +dishonest--for pg31100.txt +dishonest. pg100.txt +dishonestly pg3200.txt +dishonestly. pg3200.txt +dishonesty pg100.txt +dishonesty. pg100.txt +dishonor pg3200.txt +dishonor. pg3200.txt +dishonorable pg31100.txt +dishonorably pg3200.txt +dishonored pg3200.txt +dishonors, pg100.txt +dishonour pg31100.txt, pg100.txt +dishonour'd pg100.txt +dishonour'd. pg100.txt +dishonour, pg100.txt +dishonour. pg100.txt +dishonour; pg100.txt +dishonourably pg31100.txt +dishonoured pg100.txt +dishonours pg100.txt +disinclination pg31100.txt +disinclined pg31100.txt +disingenuous, pg31100.txt +disinherit. pg100.txt +disinherited pg31100.txt +disinherited. pg100.txt +disinterested pg31100.txt, pg3200.txt +disinterestedness pg31100.txt +disinterestedness; pg31100.txt +disirous, pg3200.txt +disjoins pg100.txt +disjointed pg3200.txt +disk pg3200.txt +disk, pg3200.txt +disk--" pg3200.txt +disks pg3200.txt +dislik'st pg100.txt +dislik'st, pg100.txt +dislike pg31100.txt, pg100.txt +dislike, pg31100.txt, pg100.txt +dislike. pg31100.txt, pg100.txt +dislike; pg31100.txt, pg100.txt +disliked pg31100.txt, pg3200.txt +disliken pg100.txt +dislikes pg31100.txt +dislikes- pg100.txt +dislocating pg3200.txt +dislodge pg3200.txt +dislodged pg3200.txt +disloyal pg3200.txt +disloyal, pg100.txt +disloyal. pg100.txt +disloyal.' pg100.txt +disloyal; pg3200.txt +disloyal? pg100.txt +disloyalty pg3200.txt +disloyalty; pg100.txt +dismal pg3200.txt +dismal! pg3200.txt +dismal. pg3200.txt +dismal." pg3200.txt +dismal; pg100.txt +dismally pg3200.txt +dismantle pg100.txt +dismantled pg3200.txt, pg100.txt +dismay pg31100.txt, pg3200.txt, pg100.txt +dismay'd pg100.txt +dismay'd. pg100.txt +dismay'd: pg100.txt +dismay'd; pg100.txt +dismay. pg100.txt +dismay; pg100.txt +dismayed pg3200.txt +dismember pg3200.txt +dismes pg100.txt +dismiss pg31100.txt, pg3200.txt, pg100.txt +dismiss'd pg100.txt +dismiss'd; pg100.txt +dismiss'd? pg100.txt +dismissal pg31100.txt, pg3200.txt +dismissal. pg3200.txt +dismissed pg31100.txt, pg3200.txt +dismissed" pg31100.txt +dismissed. pg31100.txt, pg3200.txt +dismissed." pg3200.txt +dismissed._"] pg31100.txt +dismisses pg3200.txt +dismission pg100.txt +dismount pg3200.txt +dismount, pg100.txt +dismount. pg3200.txt +dismounted pg3200.txt +dismounted. pg100.txt +dismounted;--she pg31100.txt +disobedience pg3200.txt +disobedience. pg31100.txt +disobedient, pg3200.txt +disobedient; pg3200.txt +disobey pg100.txt +disobeys! 
pg100.txt +disobliging pg3200.txt +disorbited pg3200.txt +disorder pg31100.txt, pg3200.txt +disorder's pg100.txt +disorder, pg31100.txt, pg3200.txt +disorder. pg31100.txt, pg3200.txt, pg100.txt +disordered pg3200.txt +disordered, pg31100.txt +disorderly pg3200.txt +disorderly, pg3200.txt +disorders pg100.txt +disowned. pg3200.txt +disparagement pg31100.txt +disparagement, pg100.txt +disparagement. pg100.txt +disparity, pg31100.txt +dispassionately, pg3200.txt +dispatch pg3200.txt, pg100.txt +dispatch'd pg100.txt +dispatch'd! pg100.txt +dispatch'd'? pg100.txt +dispatch'd. pg100.txt +dispatch'd; pg100.txt +dispatch'd? pg100.txt +dispatch, pg100.txt +dispatch. pg100.txt +dispatch." pg3200.txt +dispatch; pg100.txt +dispatch? pg3200.txt +dispatched pg31100.txt, pg3200.txt +dispatched, pg31100.txt, pg3200.txt +dispelled pg3200.txt +dispelled--that pg31100.txt +dispelling pg3200.txt +dispensation, pg31100.txt +dispensations pg3200.txt +dispensations, pg3200.txt +dispense pg31100.txt +dispense. pg31100.txt, pg100.txt +dispense; pg100.txt +dispenser pg3200.txt +dispers'd pg100.txt +dispers'd, pg100.txt +disperse pg31100.txt, pg100.txt +disperse. pg100.txt +dispersed pg31100.txt +dispersed. pg3200.txt +dispersion pg3200.txt +dispiacentissimo. pg3200.txt +displace pg100.txt +display pg31100.txt, pg3200.txt, pg100.txt +display, pg3200.txt +display--the pg3200.txt +display-head pg3200.txt +display. pg31100.txt, pg3200.txt +display." pg3200.txt +display.' pg3200.txt +display; pg31100.txt +displayed pg3200.txt +displayed, pg100.txt +displayed; pg3200.txt +displaying pg31100.txt, pg3200.txt +displaying, pg31100.txt, pg3200.txt, pg100.txt +displeas'd pg100.txt +displeas'd. pg100.txt +displease pg31100.txt, pg100.txt +displeased pg31100.txt, pg3200.txt +displeased, pg31100.txt +displeased. pg31100.txt +displeased." pg31100.txt +displeasure pg31100.txt, pg3200.txt, pg100.txt +displeasure, pg31100.txt, pg100.txt +displeasure. pg31100.txt, pg100.txt +disporting pg3200.txt +dispos'd, pg100.txt +dispos'd. pg100.txt +disposal pg31100.txt, pg3200.txt +disposal. pg31100.txt, pg3200.txt +disposal; pg31100.txt +dispose pg31100.txt, pg3200.txt, pg100.txt +dispose, pg100.txt +disposed pg31100.txt, pg3200.txt +disposed! pg31100.txt +disposed. pg3200.txt +disposer pg100.txt +disposing pg3200.txt +disposition pg31100.txt, pg3200.txt, pg100.txt +disposition, pg31100.txt, pg3200.txt, pg100.txt +disposition--and pg31100.txt +disposition. pg31100.txt, pg3200.txt, pg100.txt +disposition; pg31100.txt, pg3200.txt +dispositions, pg31100.txt, pg100.txt +dispossessed pg3200.txt +disprais'd. pg100.txt +dispraise pg100.txt +dispraise, pg100.txt +dispraise? pg100.txt +dispraisingly pg100.txt +disproportion pg31100.txt +disproportion'd; pg100.txt +disproportionate pg31100.txt +disproportioned pg3200.txt +disproved, pg3200.txt +disputants pg3200.txt +disputants. pg3200.txt +disputation. pg100.txt +disputatious pg3200.txt +dispute pg31100.txt, pg3200.txt +dispute, pg3200.txt +dispute. pg3200.txt +dispute: pg3200.txt +disputed pg3200.txt +disputes pg31100.txt, pg3200.txt +disputes." pg31100.txt +disputing pg3200.txt +disqualifications pg31100.txt +disquiet pg3200.txt +disquiet, pg31100.txt +disquiet. pg100.txt +disquiet; pg100.txt +disquieted pg31100.txt +disquieting. pg3200.txt +disquisition, pg3200.txt +disregarded. pg31100.txt +disregarded; pg31100.txt +disregarding pg3200.txt +disregards pg3200.txt +disremember pg3200.txt +disreputable pg3200.txt +disreputable. 
pg3200.txt +disrespect pg31100.txt, pg3200.txt +disrespect, pg3200.txt +disrespectful. pg3200.txt +disrespectfully pg3200.txt +disrobe pg100.txt +disrupted pg3200.txt +dissatisfaction pg31100.txt, pg3200.txt +dissatisfaction, pg31100.txt, pg3200.txt +dissatisfied pg31100.txt, pg3200.txt +dissatisfied. pg3200.txt +dissatisfied: pg3200.txt +dissected pg3200.txt +dissemble! pg100.txt +dissemble, pg100.txt +dissemble. pg100.txt +dissembled, pg3200.txt +dissemblers. pg100.txt +dissembling. pg31100.txt +dissension; pg100.txt +dissensions." pg3200.txt +dissent pg3200.txt +dissenters. pg3200.txt +dissenting pg3200.txt +dissever pg100.txt +dissimilar pg31100.txt +dissimilar. pg31100.txt +dissimilarity pg31100.txt +dissipated pg31100.txt +dissipated, pg3200.txt +dissipating pg31100.txt +dissipation pg31100.txt +dissipation, pg31100.txt, pg3200.txt +dissipation. pg31100.txt, pg3200.txt +dissipation." pg3200.txt +dissipations pg31100.txt +dissociate pg3200.txt +dissolutely. pg100.txt +dissolution pg3200.txt +dissolution, pg31100.txt +dissolution." pg3200.txt +dissolve pg3200.txt +dissolve, pg3200.txt, pg100.txt +dissolved pg3200.txt +dissolved, pg3200.txt, pg100.txt +dissolved. pg31100.txt +dissolved." pg3200.txt +dissonance pg3200.txt +dissuade pg31100.txt, pg100.txt +distaff pg100.txt +distaffs. pg100.txt +distance pg31100.txt, pg3200.txt, pg100.txt +distance, pg31100.txt, pg3200.txt, pg100.txt +distance--a pg3200.txt +distance--that pg3200.txt +distance. pg31100.txt, pg3200.txt, pg100.txt +distance." pg31100.txt, pg3200.txt +distance: pg3200.txt +distance; pg31100.txt, pg3200.txt, pg100.txt +distance?' pg3200.txt +distances pg3200.txt +distances, pg3200.txt +distances--side-screens pg31100.txt +distances. pg3200.txt +distances; pg3200.txt +distant pg31100.txt, pg3200.txt +distant! pg3200.txt +distant, pg31100.txt, pg3200.txt +distant. pg31100.txt, pg3200.txt +distant; pg31100.txt +distantly pg31100.txt, pg3200.txt +distaste pg31100.txt +distaste, pg100.txt +distasteful pg3200.txt +distasteful. pg31100.txt +distemp'rature, pg100.txt +distemp'rature. pg100.txt +distemp'rature; pg100.txt +distemper pg100.txt +distemper'd. pg100.txt +distemper's." pg3200.txt +distemper-- pg3200.txt +distemper. pg100.txt +distemper; pg100.txt +distemper?" pg3200.txt +distemperature pg100.txt +distempered; pg100.txt +distension pg3200.txt +distill'd pg100.txt +distillation, pg100.txt +distilled: pg100.txt +distinct pg3200.txt +distinct, pg3200.txt +distinct-- pg3200.txt +distinct. pg31100.txt, pg3200.txt +distinct: pg3200.txt +distinct; pg3200.txt +distinction pg31100.txt, pg3200.txt, pg100.txt +distinction, pg31100.txt, pg3200.txt +distinction. pg3200.txt +distinction." pg31100.txt +distinction.--emma pg31100.txt +distinction; pg3200.txt +distinction? pg100.txt +distinction?" pg31100.txt, pg3200.txt +distinctions pg31100.txt, pg3200.txt +distinctive pg3200.txt +distinctly pg31100.txt, pg3200.txt +distinctly, pg3200.txt, pg100.txt +distinctly. pg3200.txt +distinctly; pg100.txt +distinctness. pg3200.txt +distingue pg100.txt +distinguish pg31100.txt, pg3200.txt, pg100.txt +distinguish. pg3200.txt +distinguishable. pg31100.txt +distinguished pg31100.txt, pg3200.txt +distinguished, pg3200.txt +distinguished. pg31100.txt +distinguished; pg31100.txt +distinguishes pg31100.txt +distinguishing pg31100.txt +distort pg3200.txt +distorted pg3200.txt +distorted. pg3200.txt +distract pg3200.txt, pg100.txt +distract, pg100.txt +distract. pg3200.txt, pg100.txt +distract; pg100.txt +distract? 
pg100.txt +distracted pg3200.txt +distracted, pg100.txt +distracted. pg31100.txt, pg100.txt +distracted." pg31100.txt +distractedly. pg100.txt +distracting pg3200.txt +distraction pg3200.txt +distraction!" pg3200.txt +distraction, pg100.txt +distraction. pg3200.txt, pg100.txt +distraction; pg3200.txt, pg100.txt +distractions pg3200.txt, pg100.txt +distraught pg3200.txt +distraught, pg3200.txt, pg100.txt +distress pg31100.txt, pg3200.txt, pg100.txt +distress'd, pg100.txt +distress'd; pg100.txt +distress, pg31100.txt, pg3200.txt, pg100.txt +distress--but pg31100.txt +distress--can pg31100.txt +distress--jesuit pg3200.txt +distress. pg31100.txt, pg3200.txt, pg100.txt +distress." pg31100.txt +distress: pg3200.txt +distress; pg31100.txt +distress? pg100.txt +distress?" pg31100.txt +distressed pg31100.txt, pg3200.txt +distressed, pg31100.txt, pg3200.txt +distressed. pg31100.txt, pg3200.txt +distressed: pg3200.txt +distresses pg31100.txt, pg3200.txt +distresses, pg3200.txt +distresses. pg100.txt +distresseth pg3200.txt +distressful pg3200.txt +distressfully pg31100.txt +distressing pg31100.txt, pg3200.txt +distressing--found pg31100.txt +distressing. pg31100.txt +distressing; pg3200.txt +distressingly pg3200.txt +distribute pg3200.txt +distribute, pg100.txt +distributed pg3200.txt, pg100.txt +distributed, pg3200.txt +distributed. pg3200.txt, pg100.txt +distributed: pg31100.txt, pg3200.txt, pg100.txt +distributes pg3200.txt +distributing pg31100.txt, pg3200.txt, pg100.txt +distribution pg31100.txt, pg3200.txt, pg100.txt +distribution. pg3200.txt +district pg3200.txt +district' pg3200.txt +district, pg3200.txt +district. pg3200.txt +district?" pg3200.txt +districts--what pg3200.txt +distrust pg31100.txt, pg3200.txt, pg100.txt +distrust, pg100.txt +distrust. pg100.txt +distrust; pg31100.txt +distrusted pg31100.txt +distrustful pg3200.txt +disturb pg31100.txt, pg3200.txt +disturb'd pg100.txt +disturbance pg31100.txt, pg3200.txt +disturbance, pg31100.txt, pg3200.txt +disturbances, pg3200.txt +disturbances; pg3200.txt +disturbed pg31100.txt, pg3200.txt +disturbed, pg31100.txt, pg3200.txt +disturbed--the pg3200.txt +disturbed. pg31100.txt, pg3200.txt +disturbed; pg31100.txt +disturber pg3200.txt +disturbers pg31100.txt +disturbers, pg100.txt +disturbing pg3200.txt +disunite! pg100.txt +disuse pg3200.txt +disvalued pg100.txt +dit-il. pg100.txt +dit-il? pg100.txt +ditch pg100.txt +ditch, pg3200.txt, pg100.txt +ditch. pg3200.txt +ditch? pg100.txt +ditches pg3200.txt +ditches, pg3200.txt +ditty, pg3200.txt +diuersite pg3200.txt +divan pg3200.txt +divan, pg3200.txt +divans pg3200.txt +dive pg3200.txt +dive, pg3200.txt +diver pg3200.txt, pg100.txt +diverge pg3200.txt +diverging pg3200.txt +divers pg3200.txt +diversified pg3200.txt +diversion pg3200.txt +diversion, pg31100.txt, pg3200.txt +diversion." pg3200.txt +diversity pg3200.txt +divert pg3200.txt +diverted pg31100.txt, pg3200.txt +diverted. pg31100.txt +diverted." pg3200.txt +diverted?" pg31100.txt +dives pg3200.txt +divide pg31100.txt, pg3200.txt, pg100.txt +divide!' pg3200.txt +divide, pg31100.txt, pg100.txt +divide. pg3200.txt, pg100.txt +divided pg31100.txt, pg3200.txt, pg100.txt +divided, pg31100.txt +divided. pg3200.txt +dividend, pg3200.txt +dividend--either pg3200.txt +dividend; pg3200.txt +dividends. pg3200.txt +divides pg100.txt +dividing pg3200.txt +diviety, pg3200.txt +divination, pg100.txt +divine pg31100.txt, pg3200.txt, pg100.txt +divine! pg100.txt +divine!" pg3200.txt +divine, pg3200.txt, pg100.txt +divine--or pg3200.txt +divine. 
pg3200.txt, pg100.txt +divine; pg3200.txt, pg100.txt +divined pg3200.txt +divinely pg3200.txt +divineness pg100.txt +divines pg3200.txt +divines. pg100.txt +divines; pg100.txt +divinest, pg3200.txt +diving-bell, pg3200.txt +divining pg31100.txt +divining-rod pg3200.txt +divinity, pg100.txt +divinity-circuit pg3200.txt +divinity. pg100.txt +division pg31100.txt, pg3200.txt, pg100.txt +division, pg100.txt +division-agent pg3200.txt +division-agents, pg3200.txt +division. pg3200.txt, pg100.txt +division: pg100.txt +division; pg3200.txt, pg100.txt +divisions pg31100.txt +divorc'd pg100.txt +divorc'd, pg100.txt +divorce pg31100.txt, pg3200.txt, pg100.txt +divorce, pg100.txt +divorce. pg3200.txt +divorce; pg100.txt +divorced pg100.txt +divorces pg3200.txt +divulge pg3200.txt +divulge. pg3200.txt +dixon pg31100.txt, pg3200.txt +dixon!" pg31100.txt +dixon's pg31100.txt +dixon, pg31100.txt +dixon. pg31100.txt +dixon." pg31100.txt +dixon?" pg31100.txt +dizzy pg3200.txt +dizzy. pg3200.txt +do! pg31100.txt, pg3200.txt, pg100.txt +do!" pg31100.txt, pg3200.txt +do!' pg3200.txt +do" pg3200.txt +do"--and pg3200.txt +do' pg3200.txt +do'!' pg3200.txt +do'.' pg3200.txt +do't pg100.txt +do't! pg100.txt +do't, pg100.txt +do't. pg100.txt +do't; pg100.txt +do) pg3200.txt +do, pg31100.txt, pg3200.txt, pg100.txt +do,' pg3200.txt, pg100.txt +do,--and pg31100.txt +do- pg100.txt +do-- pg3200.txt +do--but pg3200.txt +do--down pg3200.txt +do--for pg31100.txt +do--i pg31100.txt, pg3200.txt +do--not pg31100.txt +do--population pg3200.txt +do--she pg3200.txt +do--you've pg3200.txt +do-nothing pg31100.txt +do. pg31100.txt, pg3200.txt, pg100.txt +do." pg31100.txt, pg3200.txt +do.' pg3200.txt +do: pg31100.txt, pg3200.txt, pg100.txt +do; pg31100.txt, pg3200.txt, pg100.txt +do? pg31100.txt, pg3200.txt, pg100.txt +do?" pg31100.txt, pg3200.txt +do?' pg3200.txt +do?'" pg3200.txt +do?--pretend pg3200.txt +doan' pg3200.txt +doated pg31100.txt +dobb pg3200.txt +dobbins' pg3200.txt +doble, pg3200.txt +dobson pg31100.txt +dobson's pg3200.txt +doc. pg3200.txt +docile, pg3200.txt +docile. pg3200.txt +docility. pg3200.txt +dock?" pg3200.txt +docked pg3200.txt +dockyard, pg31100.txt +dockyards pg3200.txt +doctor pg31100.txt, pg3200.txt, pg100.txt +doctor! pg3200.txt, pg100.txt +doctor!" pg3200.txt +doctor, pg3200.txt, pg100.txt +doctor- pg100.txt +doctor--take pg3200.txt +doctor-books pg3200.txt +doctor-stuff. pg3200.txt +doctor. pg3200.txt, pg100.txt +doctor." pg3200.txt +doctor: pg3200.txt +doctor; pg3200.txt +doctor? pg100.txt +doctor?" pg3200.txt +doctors pg3200.txt +doctors. pg3200.txt +doctors." pg3200.txt +doctors; pg3200.txt +doctrine pg3200.txt +doctrine, pg100.txt +doctrine. pg3200.txt +doctrine: pg100.txt +doctrines pg31100.txt, pg3200.txt +doctrines, pg3200.txt +document pg3200.txt +document): pg3200.txt +document, pg3200.txt +document. pg3200.txt +documentary pg3200.txt +documented pg3200.txt +documents pg3200.txt +documents. pg3200.txt +documents; pg3200.txt +dodge pg3200.txt, pg100.txt +dodge's pg3200.txt +dodge." pg3200.txt +dodged pg3200.txt +dodged--we pg3200.txt +dodging pg3200.txt +dodo pg3200.txt +dodo, pg3200.txt +dodo." pg3200.txt +doe, pg100.txt +doe; pg100.txt +doe? pg100.txt +doer pg31100.txt +doers! pg100.txt +doers. pg100.txt +does pg31100.txt, pg3200.txt, pg100.txt +does, pg31100.txt, pg3200.txt, pg100.txt +does--and pg3200.txt +does. pg31100.txt, pg3200.txt, pg100.txt +does." pg31100.txt, pg3200.txt +does.' pg3200.txt +does?" 
pg3200.txt +doesn't pg3200.txt +doesn't--er--" pg3200.txt +doest, pg100.txt +doffed pg3200.txt +dog pg3200.txt, pg100.txt +dog! pg3200.txt, pg100.txt +dog!" pg3200.txt +dog'. pg3200.txt +dog'd pg3200.txt +dog's pg3200.txt, pg100.txt +dog, pg3200.txt, pg100.txt +dog--he pg3200.txt +dog--the pg3200.txt +dog-apes; pg100.txt +dog-cart pg3200.txt +dog-collar, pg3200.txt +dog-days pg100.txt +dog-disapproval, pg3200.txt +dog-fennel pg3200.txt +dog-fightless pg3200.txt +dog-hair, pg3200.txt +dog-show. pg3200.txt +dog-tired pg3200.txt +dog-tired, pg3200.txt +dog-tired. pg3200.txt +dog-tired." pg3200.txt +dog-tooth pg3200.txt +dog. pg3200.txt, pg100.txt +dog." pg3200.txt +dog.' pg100.txt +dog; pg100.txt +dog? pg100.txt +dog?' pg3200.txt +dog?--hunt pg3200.txt +dogberry. pg100.txt +doge's pg3200.txt +doge, pg3200.txt +doge. pg3200.txt +dogfish, pg100.txt +doggerel pg3200.txt +doggerel. pg3200.txt +doggery pg3200.txt +doggie pg3200.txt +doggie, pg3200.txt +doggy!--poor pg3200.txt +dogmas pg3200.txt +dogmatic pg3200.txt +dogs pg31100.txt, pg3200.txt, pg100.txt +dogs! pg31100.txt, pg100.txt +dogs!" pg3200.txt +dogs, pg31100.txt, pg3200.txt, pg100.txt +dogs-- pg3200.txt +dogs--circle pg3200.txt +dogs--more pg3200.txt +dogs. pg3200.txt, pg100.txt +dogs." pg3200.txt +dogs.' pg3200.txt +dogs: pg3200.txt +dogs; pg3200.txt, pg100.txt +dogs?" pg3200.txt +doigts? pg100.txt +doin's." pg3200.txt +doin's; pg3200.txt +doing pg31100.txt, pg3200.txt, pg100.txt +doing!" pg31100.txt, pg3200.txt +doing, pg31100.txt, pg3200.txt, pg100.txt +doing. pg31100.txt, pg3200.txt, pg100.txt +doing." pg31100.txt, pg3200.txt +doing; pg3200.txt +doing? pg3200.txt +doing?" pg31100.txt, pg3200.txt +doing?' pg3200.txt +doing?--how pg31100.txt +doings pg3200.txt +doings, pg3200.txt +doings--on pg3200.txt +doings; pg3200.txt +doit pg100.txt +doit. pg100.txt +doits, pg100.txt +dol!--hellum-a-lee! pg3200.txt +dol--" pg3200.txt +dol.c pg3200.txt +dolabella pg100.txt +dolabella! pg100.txt +dolabella, pg100.txt +dolby pg3200.txt +doldrums pg3200.txt +dole! pg3200.txt, pg100.txt +dole, pg100.txt +doleful pg3200.txt +doll pg100.txt +doll! pg100.txt +doll's pg3200.txt +doll--and pg3200.txt +doll--his pg3200.txt +doll-clothes, pg3200.txt +doll. pg3200.txt, pg100.txt +doll] pg100.txt +dollahs pg3200.txt +dollahs. pg3200.txt +dollar pg3200.txt +dollar, pg3200.txt +dollar--there pg3200.txt +dollar-a-head pg3200.txt +dollar-and-a-half pg3200.txt +dollar-piece pg3200.txt +dollar. pg3200.txt, pg100.txt +dollar." pg3200.txt +dollars pg3200.txt +dollars! pg3200.txt +dollars!" pg3200.txt +dollars!--hurrah!--make pg3200.txt +dollars' pg3200.txt +dollars, pg3200.txt +dollars,) pg3200.txt +dollars--and pg3200.txt +dollars--it pg3200.txt +dollars--when pg3200.txt +dollars-two pg3200.txt +dollars. pg3200.txt +dollars." pg3200.txt +dollars.' pg3200.txt +dollars; pg3200.txt +dollars? pg3200.txt +dollars?" pg3200.txt +dollinger, pg3200.txt +dolor. pg100.txt +dolorous pg3200.txt +dolour. pg100.txt +dolours pg100.txt +dolphin, pg3200.txt +dolphin; pg100.txt +dolt! pg100.txt +doltish pg3200.txt +domain pg3200.txt +domain!! pg100.txt +domains pg3200.txt +domains; pg3200.txt +dome pg3200.txt +dome, pg3200.txt +domed pg3200.txt +domes pg3200.txt +domes, pg3200.txt +domestic pg31100.txt, pg3200.txt +domestic, pg31100.txt +domestic. pg3200.txt +domestics pg3200.txt +domestics, pg31100.txt +domicile, pg3200.txt +dominating pg3200.txt +domine. pg100.txt +domineer, pg100.txt +dominican. pg3200.txt +dominion pg31100.txt +dominion, pg31100.txt, pg3200.txt +dominion." 
pg3200.txt +dominions pg3200.txt, pg100.txt +dominions, pg3200.txt, pg100.txt +dominions. pg3200.txt +dominions; pg100.txt +dominoes, pg3200.txt +dominoes. pg3200.txt +domitius: pg100.txt +domitius? pg100.txt +dommelton pg100.txt +domremy pg3200.txt +domremy, pg3200.txt +domremy. pg3200.txt +don pg3200.txt, pg100.txt +don't pg31100.txt, pg3200.txt +don't!" pg3200.txt +don't, pg3200.txt +don't. pg3200.txt +don't." pg3200.txt +don't.' pg3200.txt +don't; pg3200.txt +don't?" pg3200.txt +don'ts. pg3200.txt +donalbain pg100.txt +donalbain. pg100.txt +donate pg3200.txt +donate. pg31100.txt, pg3200.txt, pg100.txt +donation pg31100.txt, pg3200.txt, pg100.txt +donation, pg100.txt +donation. pg3200.txt +donations pg31100.txt, pg3200.txt, pg100.txt +donations. pg31100.txt, pg100.txt +donavan pg31100.txt +donavan, pg31100.txt +donc pg3200.txt +doncaster, pg100.txt +doncaster; pg100.txt +done pg31100.txt, pg3200.txt, pg100.txt +done! pg3200.txt, pg100.txt +done!" pg31100.txt, pg3200.txt +done't pg100.txt +done't, pg100.txt +done't- pg100.txt +done't. pg100.txt +done't; pg100.txt +done't? pg100.txt +done) pg100.txt +done, pg31100.txt, pg3200.txt, pg100.txt +done- pg100.txt +done--" pg3200.txt +done--by pg3200.txt +done--for pg3200.txt +done--i pg31100.txt +done--that." pg3200.txt +done--the pg3200.txt +done. pg31100.txt, pg3200.txt, pg100.txt +done." pg31100.txt, pg3200.txt +done.' pg3200.txt +done.'" pg3200.txt +done.--i pg31100.txt +done: pg100.txt +done; pg31100.txt, pg3200.txt, pg100.txt +done? pg31100.txt, pg3200.txt, pg100.txt +done?" pg31100.txt, pg3200.txt +donelson. pg3200.txt +donjon-keep pg3200.txt +donkey pg3200.txt +donkey's pg3200.txt +donkey, pg3200.txt +donkey-boys. pg3200.txt +donkey. pg3200.txt +donkey; pg3200.txt +donkeys pg3200.txt +donkeys, pg3200.txt +donner pg3200.txt +dono. pg3200.txt +dono." pg3200.txt +dontchuknow, pg3200.txt +dontchuknow--i pg3200.txt +dontchuknow." pg3200.txt +donwell pg31100.txt +donwell, pg31100.txt +donwell--some pg31100.txt +donwell--very pg31100.txt +donwell. pg31100.txt +donwell?--_there_ pg31100.txt +doodle-bug, pg3200.txt +doodleville pg3200.txt +dooley's pg3200.txt +doom pg3200.txt, pg100.txt +doom! pg100.txt +doom'd! pg100.txt +doom, pg3200.txt, pg100.txt +doom- pg100.txt +doom. pg3200.txt, pg100.txt +doom: pg100.txt +doom; pg100.txt +doom? pg100.txt +doom?" pg3200.txt +doomed pg31100.txt, pg3200.txt +doomed!" pg3200.txt +doomed." pg3200.txt +doomsday. pg100.txt +door pg31100.txt, pg3200.txt, pg100.txt +door! pg100.txt +door!" pg3200.txt +door, pg31100.txt, pg3200.txt, pg100.txt +door--'" pg3200.txt +door---- pg3200.txt +door--fakeer pg3200.txt +door--looking pg3200.txt +door--you pg3200.txt +door-knob pg3200.txt +door-knob, pg3200.txt +door-plate pg3200.txt +door-post pg3200.txt +door-sill. pg3200.txt +door-step.) pg3200.txt +door-steps pg3200.txt +door. pg31100.txt, pg3200.txt, pg100.txt +door." pg31100.txt, pg3200.txt +door._] pg31100.txt +door: pg3200.txt +door; pg31100.txt, pg3200.txt, pg100.txt +door? pg3200.txt, pg100.txt +door?" pg3200.txt +doorknob, pg3200.txt +doorplate." pg3200.txt +doors pg31100.txt, pg3200.txt, pg100.txt +doors! pg100.txt +doors, pg31100.txt, pg3200.txt, pg100.txt +doors--but pg3200.txt +doors. pg31100.txt, pg3200.txt, pg100.txt +doors." pg3200.txt +doors.' pg3200.txt +doors; pg3200.txt, pg100.txt +doors? pg100.txt +doorsteps pg3200.txt +doorway pg3200.txt +doorway. pg3200.txt +doorways. pg3200.txt +dorcas pg3200.txt +dorcas?" pg3200.txt +doricles pg100.txt +doricles, pg100.txt +dorn pg3200.txt +dorn. 
pg3200.txt +dorothea. pg31100.txt +dorothy. pg31100.txt, pg100.txt +dorset pg100.txt +dorset, pg100.txt +dorset; pg100.txt +dorsetshire pg31100.txt +dorsetshire! pg31100.txt +dorsetshire, pg31100.txt +dorsetshire." pg31100.txt +dose, pg3200.txt +dosed pg3200.txt +doses, pg3200.txt +doses. pg3200.txt +dost pg31100.txt, pg3200.txt, pg100.txt +dost, pg100.txt +dost- pg100.txt +dost. pg100.txt +dost; pg100.txt +dot, pg3200.txt +dot-and-go-one!" pg3200.txt +dot. pg3200.txt +dot." pg3200.txt +dotage. pg100.txt +dote pg3200.txt, pg100.txt +dote, pg100.txt +dote. pg100.txt +dote; pg100.txt +doter's pg3200.txt +dotes pg100.txt +dotes, pg100.txt +doth pg3200.txt, pg100.txt +doth, pg100.txt +doth--" pg3200.txt +doth. pg100.txt +doth; pg100.txt +doth? pg100.txt +dotted pg3200.txt +double pg31100.txt, pg3200.txt, pg100.txt +double, pg31100.txt, pg100.txt +double-barreled pg3200.txt +double-damn'd; pg100.txt +double-dealer. pg100.txt +double-shotted pg3200.txt +double-team pg3200.txt +double. pg3200.txt, pg100.txt +double." pg3200.txt +double; pg3200.txt +doubled pg3200.txt +doubled, pg3200.txt +doubled." pg3200.txt +doublet pg3200.txt, pg100.txt +doublet, pg100.txt +doublet--glanced pg3200.txt +doublet. pg100.txt +doublets pg3200.txt +doublets. pg100.txt +doubly pg31100.txt +doubly. pg100.txt +doubt pg31100.txt, pg3200.txt, pg100.txt +doubt! pg100.txt +doubt!" pg3200.txt +doubt)--you pg31100.txt +doubt, pg31100.txt, pg3200.txt, pg100.txt +doubt-- pg3200.txt +doubt--' pg3200.txt +doubt--conviction--doubt pg3200.txt +doubt--though pg3200.txt +doubt. pg31100.txt, pg3200.txt, pg100.txt +doubt." pg3200.txt +doubt: pg100.txt +doubt:) pg3200.txt +doubt; pg31100.txt, pg3200.txt, pg100.txt +doubt? pg3200.txt, pg100.txt +doubt?" pg3200.txt +doubted pg31100.txt, pg3200.txt, pg100.txt +doubted, pg3200.txt, pg100.txt +doubted. pg3200.txt +doubted: pg31100.txt, pg3200.txt +doubted; pg31100.txt +doubted;--nor pg31100.txt +doubtful pg3200.txt, pg100.txt +doubtful, pg3200.txt +doubtful--but pg3200.txt +doubtful. pg31100.txt, pg3200.txt, pg100.txt +doubtful." pg3200.txt +doubtful; pg100.txt +doubtfully pg100.txt +doubtfully. pg100.txt +doubting pg31100.txt, pg3200.txt +doubting. pg3200.txt +doubtless pg3200.txt, pg100.txt +doubtless, pg100.txt +doubtless--a pg3200.txt +doubtless. pg3200.txt, pg100.txt +doubts pg31100.txt, pg3200.txt +doubts, pg31100.txt, pg3200.txt +doubts. pg3200.txt, pg100.txt +doubts; pg3200.txt +douceur, pg31100.txt +dough. pg3200.txt +doughface, pg3200.txt +doughnut. pg3200.txt +doughnuts pg3200.txt +doughnuts, pg3200.txt +doughnuts; pg3200.txt +douglas pg3200.txt, pg100.txt +douglas'- pg100.txt +douglas'." pg3200.txt +douglas, pg3200.txt, pg100.txt +douglas. pg3200.txt, pg100.txt +douglas? pg100.txt +douglas]. pg100.txt +douglass pg3200.txt +dove pg100.txt +dove! pg100.txt +dove-colored) pg3200.txt +dove-cote, pg100.txt +dove. pg100.txt +dove: pg100.txt +dove; pg100.txt +dove? pg100.txt +dovelike, pg3200.txt +dover, pg3200.txt +dover. pg100.txt +dover? pg100.txt +doves, pg100.txt +doves; pg3200.txt +doves? pg3200.txt +dow'r pg100.txt +dowager pg31100.txt, pg100.txt +dowager, pg100.txt +dowager. pg3200.txt +dowdy pg3200.txt +dower pg3200.txt, pg100.txt +dower! pg100.txt +dower, pg100.txt +dower; pg100.txt +dower? pg100.txt +dowley pg3200.txt +dowls pg3200.txt +down! pg3200.txt, pg100.txt +down!" pg3200.txt +down!' 
pg3200.txt +down" pg3200.txt +down, pg31100.txt, pg3200.txt, pg100.txt +down," pg3200.txt +down- pg100.txt +down-- pg3200.txt +down--a pg3200.txt +down--but pg3200.txt +down--i pg3200.txt +down--in pg3200.txt +down--it pg3200.txt +down--you pg3200.txt +down-hearted, pg3200.txt +down-hearted. pg3200.txt +down-rased, pg100.txt +down-stairs pg3200.txt +down-stairs, pg3200.txt +down-stream, pg3200.txt +down-stream; pg3200.txt +down-town pg3200.txt +down. pg31100.txt, pg3200.txt, pg100.txt +down." pg31100.txt, pg3200.txt +down.) pg3200.txt +down.] pg100.txt +down: pg31100.txt, pg3200.txt, pg100.txt +down; pg31100.txt, pg3200.txt, pg100.txt +down;' pg3200.txt +down? pg31100.txt, pg3200.txt, pg100.txt +down?" pg31100.txt, pg3200.txt +down?' pg3200.txt +down] pg100.txt +downburnt. pg3200.txt +downfall. pg100.txt +downhearted, pg3200.txt +downhearted." pg3200.txt +downhearted; pg3200.txt +downhill, pg3200.txt +downhill. pg3200.txt +downing pg3200.txt +download pg100.txt +downpour pg3200.txt +downright pg31100.txt +downright. pg100.txt +downs pg3200.txt +downs, pg3200.txt, pg100.txt +downs. pg31100.txt +downs." pg31100.txt +downstairs pg3200.txt +downstairs, pg3200.txt +downstairs; pg31100.txt +downstream pg3200.txt +downtown. pg3200.txt +downward pg3200.txt +downward! pg3200.txt +downward, pg3200.txt +downward. pg3200.txt +downward; pg3200.txt +downwards. pg3200.txt +dowry pg100.txt +dowry, pg100.txt +dowry. pg100.txt +dowry.' pg100.txt +dowry: pg100.txt +doyle pg3200.txt +doze. pg3200.txt +dozed pg3200.txt +dozen pg31100.txt, pg3200.txt, pg100.txt +dozen, pg3200.txt +dozen- pg100.txt +dozen--then pg3200.txt +dozen. pg31100.txt, pg3200.txt +dozen." pg3200.txt +dozen; pg3200.txt +dozens pg3200.txt +dozens." pg31100.txt +dozing pg3200.txt +dozing." pg3200.txt +dr pg31100.txt +dr. pg31100.txt, pg3200.txt +drab pg100.txt +drab, pg100.txt +drab? pg100.txt +drachmas. pg100.txt +drachms pg3200.txt +draft pg31100.txt, pg3200.txt +draft. pg3200.txt +draft?" pg3200.txt +drafted pg3200.txt +drafts pg3200.txt +drafts?" pg3200.txt +draftsmen pg3200.txt +drag pg3200.txt +drag! pg3200.txt +dragged pg3200.txt +dragged. pg3200.txt +draggers, pg3200.txt +dragging pg3200.txt +dragoman pg3200.txt +dragoman. pg3200.txt +dragomen pg3200.txt +dragon pg3200.txt +dragon, pg3200.txt +dragon-killers pg3200.txt +dragon; pg3200.txt +dragonish; pg100.txt +dragons pg3200.txt +dragons! pg100.txt +dragoon pg3200.txt +dragoons, pg3200.txt +drain pg100.txt +drain-pipes pg3200.txt +drains, pg3200.txt +drake pg31100.txt +dram pg100.txt +dram; pg3200.txt, pg100.txt +drama's pg3200.txt +drama, pg3200.txt +drama. pg3200.txt +dramatic pg3200.txt +dramatically pg3200.txt +dramatists, pg3200.txt +dramatization, pg3200.txt +dramatize pg3200.txt +dramatized pg3200.txt +drank pg31100.txt, pg3200.txt, pg100.txt +drank, pg31100.txt, pg3200.txt +drank. pg3200.txt +draped pg3200.txt +draperies pg3200.txt +drapery pg3200.txt +drapery. pg3200.txt +draught pg31100.txt, pg3200.txt, pg100.txt +draught, pg100.txt +draughted pg3200.txt +draughts pg31100.txt, pg3200.txt, pg100.txt +draughts, pg100.txt +draw pg31100.txt, pg3200.txt, pg100.txt +draw! pg100.txt +draw!" pg3200.txt +draw, pg3200.txt, pg100.txt +draw--" pg3200.txt +draw. pg3200.txt, pg100.txt +draw." pg3200.txt +draw; pg31100.txt, pg100.txt +draw?" pg31100.txt +draw] pg100.txt +drawback pg31100.txt +drawback, pg31100.txt, pg3200.txt +drawback. pg31100.txt, pg3200.txt +drawback." pg31100.txt +drawbacks pg3200.txt +drawbacks. 
pg3200.txt +drawbacks: pg3200.txt +drawbridge pg3200.txt +drawbridge, pg31100.txt +drawer pg31100.txt, pg3200.txt, pg100.txt +drawer. pg31100.txt, pg100.txt +drawers pg31100.txt, pg3200.txt, pg100.txt +drawers, pg31100.txt, pg3200.txt, pg100.txt +drawers. pg100.txt +drawing pg31100.txt, pg3200.txt, pg100.txt +drawing!" pg3200.txt +drawing, pg31100.txt, pg3200.txt +drawing- pg3200.txt +drawing-room pg31100.txt, pg3200.txt +drawing-room!" pg3200.txt +drawing-room, pg31100.txt, pg3200.txt +drawing-room. pg31100.txt, pg3200.txt +drawing-room.' pg3200.txt +drawing-room; pg31100.txt, pg3200.txt +drawing-rooms pg31100.txt +drawing-rooms, pg3200.txt +drawing-rooms. pg31100.txt +drawing-table, pg31100.txt +drawing. pg3200.txt +drawing." pg31100.txt +drawings pg31100.txt, pg3200.txt +drawl pg3200.txt +drawl. pg3200.txt +drawl: pg3200.txt +drawling pg3200.txt +drawn pg31100.txt, pg3200.txt, pg100.txt +drawn! pg100.txt +drawn, pg3200.txt, pg100.txt +drawn. pg100.txt +drawn; pg100.txt +drawn? pg100.txt +drawn?'" pg31100.txt +draws pg3200.txt, pg100.txt +draws] pg100.txt +drayman, pg3200.txt +drays, pg3200.txt +dread pg31100.txt, pg3200.txt, pg100.txt +dread, pg3200.txt +dread,--perhaps pg31100.txt +dread. pg3200.txt, pg100.txt +dreaded pg31100.txt, pg3200.txt +dreaded. pg31100.txt, pg100.txt +dreadful pg31100.txt, pg3200.txt +dreadful! pg31100.txt +dreadful!" pg31100.txt +dreadful, pg3200.txt +dreadful--" pg3200.txt +dreadful. pg31100.txt, pg3200.txt, pg100.txt +dreadful." pg31100.txt, pg3200.txt +dreadful.' pg3200.txt +dreadfully pg31100.txt, pg3200.txt, pg100.txt +dreadfully, pg31100.txt +dreadfully. pg31100.txt +dreading pg31100.txt, pg3200.txt +dreads pg3200.txt +dream pg31100.txt, pg3200.txt, pg100.txt +dream! pg31100.txt, pg3200.txt, pg100.txt +dream!" pg3200.txt +dream'd pg100.txt +dream, pg3200.txt, pg100.txt +dream--simply pg3200.txt +dream--where pg3200.txt +dream-land. pg3200.txt +dream-like? pg3200.txt +dream-marks pg3200.txt +dream-pictures pg3200.txt +dream-stuff pg3200.txt +dream. pg3200.txt, pg100.txt +dream." pg3200.txt +dream; pg3200.txt, pg100.txt +dream? pg3200.txt, pg100.txt +dream?" pg3200.txt +dream] pg100.txt +dreamed pg3200.txt +dreamer. pg31100.txt +dreamer; pg3200.txt +dreamers pg3200.txt +dreamily pg3200.txt +dreamily, pg3200.txt +dreamily-- pg3200.txt +dreaming pg31100.txt, pg3200.txt +dreaming, pg100.txt +dreaming. pg3200.txt +dreaming." pg3200.txt +dreaming?" pg3200.txt +dreamland. pg3200.txt +dreamless pg3200.txt +dreams pg3200.txt, pg100.txt +dreams! pg3200.txt +dreams, pg3200.txt, pg100.txt +dreams--as pg3200.txt +dreams--no, pg3200.txt +dreams--surely pg3200.txt +dreams. pg3200.txt, pg100.txt +dreams." pg100.txt +dreams; pg3200.txt, pg100.txt +dreamt pg100.txt +dreamy pg3200.txt +dreamy, pg3200.txt +dreamy; pg3200.txt +drearier. pg3200.txt +dreariest pg3200.txt +dreariest, pg3200.txt +dreary pg3200.txt +dreary! pg3200.txt +dreary, pg3200.txt +dredge pg3200.txt +dregs pg3200.txt, pg100.txt +dregs, pg3200.txt +dregs: pg100.txt +drench pg3200.txt, pg100.txt +drench'd pg100.txt +drenched pg3200.txt +drenching pg3200.txt +drenching, pg3200.txt +drenchings pg3200.txt +dresden pg31100.txt +dresden. pg3200.txt +dress pg31100.txt, pg3200.txt, pg100.txt +dress!" pg3200.txt +dress'd pg100.txt +dress'd! pg100.txt +dress'd, pg100.txt +dress'd. pg100.txt +dress, pg31100.txt, pg3200.txt, pg100.txt +dress,--show pg31100.txt +dress--a pg3200.txt +dress--as pg3200.txt +dress. pg31100.txt, pg3200.txt +dress." pg31100.txt, pg3200.txt +dress.] pg3200.txt +dress; pg3200.txt +dress?" 
pg3200.txt +dressed pg31100.txt, pg3200.txt, pg100.txt +dressed" pg31100.txt +dressed, pg31100.txt, pg3200.txt +dressed. pg3200.txt +dressed." pg31100.txt +dressed; pg31100.txt +dressed?" pg3200.txt +dressee pg3200.txt +dresser pg100.txt +dresses pg31100.txt, pg3200.txt +dressing pg31100.txt, pg3200.txt +dressing, pg3200.txt +dressing-closet pg31100.txt +dressing-closets; pg31100.txt +dressing-room." pg31100.txt +dressing-rooms, pg31100.txt +dressing. pg3200.txt +dressy pg31100.txt, pg3200.txt +drest, pg3200.txt +drew pg31100.txt, pg3200.txt, pg100.txt +drew. pg100.txt +drew; pg100.txt +dreyfus, pg3200.txt +dreyfus.' pg3200.txt +driblets pg3200.txt +dried pg3200.txt, pg100.txt +dried, pg3200.txt +dried-apple-pie pg3200.txt +dried. pg3200.txt +dried." pg3200.txt +drier pg100.txt +dries pg3200.txt, pg100.txt +drift pg3200.txt, pg100.txt +drift, pg3200.txt, pg100.txt +drift. pg100.txt +drift; pg100.txt +drifted pg3200.txt +drifting pg3200.txt +drill pg3200.txt +drill. pg3200.txt +drilled pg3200.txt +drilled, pg3200.txt +drink pg3200.txt, pg100.txt +drink! pg100.txt +drink!" pg3200.txt +drink, pg31100.txt, pg3200.txt, pg100.txt +drink. pg3200.txt, pg100.txt +drink." pg100.txt +drink; pg3200.txt +drink? pg3200.txt, pg100.txt +drink?" pg3200.txt +drinker pg3200.txt +drinketh. pg100.txt +drinking pg31100.txt, pg3200.txt, pg100.txt +drinking, pg3200.txt +drinking. pg3200.txt, pg100.txt +drinking; pg100.txt +drinking? pg100.txt +drinking?" pg31100.txt +drinks pg31100.txt, pg3200.txt, pg100.txt +drinks, pg3200.txt +drinks--" pg3200.txt +drinks. pg100.txt +drinks; pg100.txt +drinks] pg100.txt +drip pg3200.txt +drip) pg3200.txt +drip. pg3200.txt +dripped pg3200.txt +dripping, pg3200.txt +driscoll pg3200.txt +driscoll's pg3200.txt +driscoll, pg3200.txt +driscoll," pg3200.txt +driscoll. pg3200.txt +driscoll." pg3200.txt +driscoll; pg3200.txt +driscoll?" pg3200.txt +drive pg31100.txt, pg3200.txt, pg100.txt +drive, pg31100.txt, pg3200.txt +drive-so pg3200.txt +drive. pg31100.txt, pg3200.txt +drive." pg31100.txt +drive; pg31100.txt, pg3200.txt +drivel pg3200.txt +drivel. pg3200.txt +driveling pg3200.txt +driven pg31100.txt, pg3200.txt, pg100.txt +driven, pg3200.txt, pg100.txt +driven; pg100.txt +driver pg31100.txt, pg3200.txt +driver's pg3200.txt +driver, pg3200.txt +driver--the pg3200.txt +driver. pg3200.txt +driver." pg3200.txt +driver: pg3200.txt +drivers pg3200.txt +drivers, pg3200.txt +drivers. pg3200.txt +drives pg3200.txt, pg100.txt +drives. pg3200.txt, pg100.txt +driving pg31100.txt, pg3200.txt +driving, pg31100.txt, pg3200.txt +driving. pg3200.txt +drizzles pg100.txt +droll!" pg31100.txt +dromedaries pg3200.txt +dromedaries, pg3200.txt +dromio pg100.txt +dromio. pg100.txt +dromio? pg100.txt +dromio] pg100.txt +droned pg3200.txt +droop! pg100.txt +drooped pg3200.txt +drooping pg3200.txt, pg100.txt +drooping. pg100.txt +droopy; pg3200.txt +drop pg31100.txt, pg3200.txt, pg100.txt +drop!" pg3200.txt +drop' pg3200.txt +drop, pg3200.txt, pg100.txt +drop. pg3200.txt, pg100.txt +droplets pg100.txt +dropp'd pg100.txt +dropp'd, pg100.txt +dropp'd. pg100.txt +dropped pg31100.txt, pg3200.txt +dropped, pg3200.txt +dropped. pg31100.txt, pg3200.txt +dropped." pg3200.txt +dropping pg3200.txt +drops pg3200.txt, pg100.txt +drops! pg100.txt +drops, pg100.txt +drops- pg100.txt +drops. pg100.txt +drops? pg100.txt +dropt. pg31100.txt +dross, pg100.txt +dross." 
pg3200.txt +dross; pg100.txt +drouths, pg3200.txt +drove pg31100.txt, pg3200.txt, pg100.txt +drown pg31100.txt, pg3200.txt, pg100.txt +drown'd pg100.txt +drown'd, pg100.txt +drown'd. pg100.txt +drown'd; pg100.txt +drown'd? pg100.txt +drown, pg100.txt +drown." pg3200.txt +drown; pg3200.txt +drownd pg3200.txt +drownded!" pg3200.txt +drownded, pg3200.txt +drownded. pg3200.txt +drownded." pg3200.txt +drownded: pg3200.txt +drowned pg3200.txt +drowned! pg3200.txt +drowned, pg3200.txt +drowned. pg3200.txt +drowned." pg3200.txt +drowning pg3200.txt, pg100.txt +drowning, pg3200.txt +drowning. pg3200.txt +drowns pg100.txt +drowns. pg3200.txt +drowse pg3200.txt +drowse, pg3200.txt, pg100.txt +drowsed pg3200.txt +drowsed. pg3200.txt +drowsily pg3200.txt +drowsily-- pg3200.txt +drowsily? pg100.txt +drowsiness pg3200.txt +drowsiness: pg3200.txt +drowsing pg3200.txt +drowsy pg3200.txt +drowsy, pg3200.txt, pg100.txt +drubbed, pg3200.txt +drudge pg100.txt +drudge! pg100.txt +drudges! pg100.txt +drug pg3200.txt, pg100.txt +drug, pg3200.txt +druggists, pg3200.txt +drugs pg3200.txt, pg100.txt +drugs? pg100.txt +drum pg100.txt +drum! pg100.txt +drum's pg100.txt +drum, pg3200.txt, pg100.txt +drum--on pg3200.txt +drum-beat, pg3200.txt +drum. pg100.txt +drum; pg100.txt +drum? pg100.txt +drummed pg3200.txt +drummer-boy, pg3200.txt +drummond pg31100.txt +drummond, pg31100.txt +drums pg3200.txt, pg100.txt +drums! pg100.txt +drums, pg100.txt +drums--notes pg3200.txt +drums. pg100.txt +drums.' pg100.txt +drums] pg100.txt +drunk pg31100.txt, pg3200.txt, pg100.txt +drunk! pg100.txt +drunk'st pg100.txt +drunk, pg3200.txt, pg100.txt +drunk,--as pg3200.txt +drunk--' pg3200.txt +drunk--not pg3200.txt +drunk. pg3200.txt, pg100.txt +drunk." pg3200.txt +drunk.' pg3200.txt +drunk: pg100.txt +drunk; pg3200.txt, pg100.txt +drunk? pg3200.txt, pg100.txt +drunk?" pg3200.txt +drunk?' pg3200.txt +drunkard pg3200.txt +drunkard! pg100.txt +drunkard, pg3200.txt +drunkard-- pg3200.txt +drunkards pg100.txt +drunkards, pg3200.txt, pg100.txt +drunkards. pg100.txt +drunken pg31100.txt, pg3200.txt, pg100.txt +drunken, pg3200.txt +drunkenness'- pg100.txt +drunkenness, pg3200.txt, pg100.txt +drunkest pg3200.txt +drunkest. pg3200.txt +drury pg3200.txt +druther pg3200.txt +dry pg31100.txt, pg3200.txt, pg100.txt +dry!" pg31100.txt +dry, pg3200.txt, pg100.txt +dry- pg100.txt +dry-goods pg3200.txt +dry-nurse pg3200.txt +dry-shod. pg3200.txt +dry. pg3200.txt, pg100.txt +dry." pg3200.txt +dryer, pg31100.txt +drying, pg3200.txt +drying.' pg3200.txt +du pg3200.txt +du-- pg3200.txt +du--vin!" pg3200.txt +duality pg3200.txt +dubbed pg3200.txt +dubbonese pg3200.txt +dubiety. pg3200.txt +dubious pg3200.txt +dubious, pg31100.txt +dubiousness pg3200.txt +dublin. pg3200.txt +dublin? pg3200.txt +dubuque pg3200.txt +ducal pg3200.txt +ducal: pg3200.txt +ducat, pg100.txt +ducats pg3200.txt, pg100.txt +ducats! pg100.txt +ducats, pg100.txt +ducats--mine pg3200.txt +ducats. pg3200.txt, pg100.txt +ducats.' pg100.txt +ducats? pg100.txt +ducats?' pg100.txt +ducdame; pg100.txt +duchess pg3200.txt, pg100.txt +duchess, pg31100.txt, pg100.txt +duchess. pg3200.txt, pg100.txt +duchesses pg3200.txt +duck, pg3200.txt +duck. pg100.txt +ducks pg3200.txt +ducks! pg100.txt +dudes, pg3200.txt +dudesses. pg3200.txt +duds pg3200.txt +due pg31100.txt, pg3200.txt, pg100.txt +due, pg3200.txt, pg100.txt +due. pg31100.txt, pg3200.txt, pg100.txt +due." pg31100.txt, pg3200.txt +due.' pg100.txt +due; pg3200.txt, pg100.txt +due? pg31100.txt, pg100.txt +duel pg3200.txt +duel, pg3200.txt +duel. 
pg31100.txt, pg3200.txt +duel." pg3200.txt +duel; pg3200.txt +duel?" pg3200.txt +duel] pg3200.txt +dueling pg3200.txt +dueling-ground pg3200.txt +dueling-house, pg3200.txt +dueling-place. pg3200.txt +duelists pg3200.txt +duello pg100.txt +duels pg3200.txt +duels, pg3200.txt +duenna pg31100.txt +dues pg3200.txt +dues. pg100.txt +duet pg31100.txt +duet. pg3200.txt +duets pg3200.txt +duets, pg31100.txt +duff pg3200.txt +duffer!' pg3200.txt +dug pg3200.txt +dug! pg100.txt +dug, pg100.txt +dug-outs pg3200.txt +duke pg31100.txt, pg3200.txt, pg100.txt +duke! pg100.txt +duke!" pg3200.txt +duke's pg3200.txt, pg100.txt +duke's, pg3200.txt +duke's. pg100.txt +duke, pg3200.txt, pg100.txt +duke- pg100.txt +duke--and pg3200.txt +duke. pg3200.txt, pg100.txt +duke: pg100.txt +duke; pg3200.txt, pg100.txt +duke? pg3200.txt, pg100.txt +duke?" pg3200.txt +duke] pg100.txt +dukedom pg100.txt +dukedom, pg100.txt +dukedom. pg100.txt +dukedom; pg100.txt +dukedom? pg100.txt +dukedoms, pg100.txt +dukedoms. pg100.txt +dukelets, pg3200.txt +dukelets. pg3200.txt +dukes pg3200.txt, pg100.txt +dukes, pg100.txt +dukes? pg3200.txt, pg100.txt +dulcet pg100.txt +dulcet, pg100.txt +dulche. pg100.txt +dull pg31100.txt, pg3200.txt, pg100.txt +dull! pg3200.txt +dull, pg31100.txt, pg3200.txt, pg100.txt +dull. pg31100.txt, pg3200.txt, pg100.txt +dull; pg31100.txt, pg100.txt +dulled pg3200.txt +dulled, pg3200.txt +duller pg100.txt +dulling pg3200.txt +dullness pg3200.txt, pg100.txt +dullness, pg100.txt +dullness- pg100.txt +dully pg3200.txt +dulness pg3200.txt +dulness: pg100.txt +duluth pg3200.txt +duly pg31100.txt, pg3200.txt +duly. pg100.txt +dumain pg100.txt +dumain, pg100.txt +dumain? pg100.txt +dumas' pg3200.txt +dumb pg3200.txt, pg100.txt +dumb! pg3200.txt +dumb, pg3200.txt, pg100.txt +dumb--he pg3200.txt +dumb-bells, pg3200.txt +dumb-show pg3200.txt +dumb-show--three pg3200.txt +dumb. pg3200.txt, pg100.txt +dumb; pg3200.txt +dumb? pg100.txt +dumbness, pg100.txt +dumbness. pg100.txt +dumfounded. pg3200.txt +dummy pg3200.txt +dummy!" pg3200.txt +dummy, pg3200.txt +dummy. pg3200.txt +dummy; pg3200.txt +dump pg3200.txt +dump-pile, pg3200.txt +dumped pg3200.txt +dumping pg3200.txt +dumps- pg100.txt +dumps." pg3200.txt +dumps? pg100.txt +dun: pg100.txt +duncan pg3200.txt, pg100.txt +duncan, pg100.txt +duncan." pg3200.txt +duneka: pg3200.txt +dunfermline pg3200.txt +dung pg3200.txt +dung-cart; pg31100.txt +dungeon pg3200.txt +dungeon, pg3200.txt, pg100.txt +dungeon. pg3200.txt, pg100.txt +dungeon." pg3200.txt +dungeon; pg3200.txt +dungeon? pg3200.txt +dungeon] pg3200.txt +dungeons pg3200.txt +dungeons, pg3200.txt +dungeons." pg3200.txt +dunghill! pg100.txt +dunghills, pg100.txt +dunhams pg3200.txt +dunker pg3200.txt +dunlap pg3200.txt +dunlap!" pg3200.txt +dunlap's pg3200.txt +dunlap, pg3200.txt +dunlap." pg3200.txt +dunlap?" pg3200.txt +dunlaps pg3200.txt +dunlaps?" pg3200.txt +dunois pg3200.txt +duns pg3200.txt +dunsinane pg100.txt +dunsinane, pg100.txt +dunsinane. pg100.txt +duodecimo pg3200.txt +dupanloup, pg3200.txt +dupe pg3200.txt +dupe, pg31100.txt +duped pg31100.txt +duped; pg31100.txt +dupes pg3200.txt +duplicate pg31100.txt, pg3200.txt +duplicate. pg3200.txt +duplicated pg3200.txt +duplicates pg3200.txt +duplicates. pg3200.txt +duplicity pg31100.txt +dur- pg3200.txt +durable; pg31100.txt +durance? pg100.txt +durand pg3200.txt +duration pg3200.txt +duration, pg31100.txt, pg3200.txt +duration. pg3200.txt +duration." 
pg31100.txt +duration; pg31100.txt +durban pg3200.txt +durbar pg3200.txt +durham-- pg3200.txt +during pg31100.txt, pg3200.txt, pg100.txt +durst pg100.txt +durst; pg100.txt +durward? pg3200.txt +dusenheimer pg3200.txt +dusk pg3200.txt +dusky pg3200.txt +dusky, pg3200.txt +dust pg31100.txt, pg3200.txt, pg100.txt +dust! pg100.txt +dust' pg3200.txt +dust, pg31100.txt, pg3200.txt, pg100.txt +dust,' pg3200.txt +dust--effect pg3200.txt +dust-drift pg3200.txt +dust. pg3200.txt, pg100.txt +dust.' pg3200.txt +dust: pg3200.txt +dust; pg100.txt +dust? pg100.txt +dusted pg3200.txt +dusting pg3200.txt +dusty pg3200.txt +dusty- pg3200.txt +dutch pg3200.txt +dutch, pg100.txt +dutch--to pg3200.txt +dutch. pg3200.txt +dutchman pg3200.txt +dutchy's pg3200.txt +dutchy, pg3200.txt +dutiable pg3200.txt +duties pg31100.txt, pg3200.txt, pg100.txt +duties, pg31100.txt, pg3200.txt +duties. pg31100.txt, pg3200.txt, pg100.txt +duties." pg3200.txt +duties; pg3200.txt, pg100.txt +duties?" pg3200.txt +dutiful pg31100.txt +dutiful? pg100.txt +dutifully pg31100.txt +dutton"--particularly pg3200.txt +duty pg31100.txt, pg3200.txt, pg100.txt +duty! pg100.txt +duty, pg31100.txt, pg3200.txt, pg100.txt +duty," pg3200.txt +duty- pg100.txt +duty--which pg3200.txt +duty-call pg3200.txt +duty. pg31100.txt, pg3200.txt, pg100.txt +duty." pg31100.txt, pg3200.txt +duty; pg31100.txt, pg3200.txt, pg100.txt +duty? pg100.txt +duval pg3200.txt +dvorak, pg3200.txt +dwarf pg3200.txt +dwarf! pg100.txt +dwarf, pg3200.txt +dwarf. pg100.txt +dwarf; pg100.txt +dwarfed pg3200.txt +dwarfish! pg100.txt +dwarfs, pg3200.txt +dwell pg31100.txt, pg3200.txt, pg100.txt +dwell, pg100.txt +dwell--' pg3200.txt +dwell: pg100.txt +dwell? pg100.txt +dwelling pg31100.txt, pg3200.txt +dwelling, pg3200.txt +dwelling- pg3200.txt +dwelling-house pg3200.txt +dwelling-house, pg100.txt +dwelling-house-it pg3200.txt +dwelling-place pg3200.txt +dwelling-place. pg100.txt +dwelling. pg100.txt +dwelling." pg31100.txt +dwelling; pg31100.txt +dwellings pg3200.txt +dwellings. pg3200.txt +dwellings; pg3200.txt +dwells pg3200.txt, pg100.txt +dwells, pg3200.txt, pg100.txt +dwells; pg100.txt +dwelt pg31100.txt, pg3200.txt +dwelt, pg3200.txt +dwindle pg3200.txt +dwindled pg3200.txt +dy'd pg100.txt +dye pg100.txt +dye, pg100.txt +dyed. pg100.txt +dyed? pg100.txt +dying pg31100.txt, pg3200.txt +dying!" pg3200.txt +dying, pg31100.txt, pg3200.txt, pg100.txt +dying. pg3200.txt, pg100.txt +dying." pg3200.txt +dying; pg3200.txt, pg100.txt +dykes, pg3200.txt +dykes. pg3200.txt +dynamic pg3200.txt +dynamite pg3200.txt +dynamite, pg3200.txt +dynamite. pg3200.txt +dynamo pg3200.txt +dynamo; pg3200.txt +dynasties, pg3200.txt +dynasty pg3200.txt +dynasty!" pg3200.txt +dynasty, pg3200.txt +dynasty. pg3200.txt +e'en pg3200.txt +e'er pg3200.txt, pg100.txt +e'il pg100.txt +e, pg3200.txt, pg100.txt +e. pg3200.txt +e.'s, pg31100.txt +e.) pg3200.txt +e., pg31100.txt +each pg31100.txt, pg3200.txt, pg100.txt +each), pg3200.txt +each, pg31100.txt, pg3200.txt, pg100.txt +each--gratis; pg3200.txt +each--meaningless, pg3200.txt +each. pg31100.txt, pg3200.txt +each." pg3200.txt +each; pg31100.txt, pg3200.txt, pg100.txt +each? pg100.txt +eager pg31100.txt, pg3200.txt +eager, pg3200.txt +eager--and pg3200.txt +eager. pg100.txt +eagerly pg31100.txt, pg3200.txt +eagerly, pg3200.txt +eagerly-- pg3200.txt +eagerly--if pg3200.txt +eagerly. pg3200.txt +eagerly: pg3200.txt +eagerly; pg31100.txt +eagerness pg31100.txt, pg3200.txt +eagerness, pg3200.txt +eagerness--"never, pg31100.txt +eagerness. 
pg31100.txt, pg3200.txt +eagerness; pg31100.txt, pg3200.txt +eagle pg3200.txt, pg100.txt +eagle, pg3200.txt, pg100.txt +eagle. pg3200.txt, pg100.txt +eagles. pg100.txt +ealer pg3200.txt +eaning pg100.txt +ear pg31100.txt, pg3200.txt, pg100.txt +ear! pg100.txt +ear, pg31100.txt, pg3200.txt, pg100.txt +ear- pg100.txt +ear-- pg3200.txt +ear--another pg3200.txt +ear-marks pg3200.txt +ear-rings, pg3200.txt +ear-socket pg3200.txt +ear-splitting pg3200.txt +ear-trumpets pg3200.txt +ear. pg31100.txt, pg3200.txt, pg100.txt +ear." pg3200.txt +ear: pg31100.txt, pg3200.txt, pg100.txt +ear; pg3200.txt, pg100.txt +ear? pg100.txt +ear?" pg3200.txt +ear] pg100.txt +earl pg31100.txt, pg3200.txt, pg100.txt +earl! pg3200.txt +earl!" pg3200.txt +earl's pg3200.txt +earl, pg3200.txt, pg100.txt +earl--" pg3200.txt +earl. pg100.txt +earl." pg3200.txt +earl; pg3200.txt, pg100.txt +earl? pg100.txt +earl?" pg3200.txt +earldom pg3200.txt +earldom, pg3200.txt +earldom- pg100.txt +earldom." pg3200.txt +earlier pg31100.txt, pg3200.txt +earlier! pg31100.txt +earlier!" pg3200.txt +earlier, pg31100.txt, pg3200.txt +earlier. pg31100.txt, pg3200.txt +earlier." pg31100.txt +earlier.--but pg3200.txt +earlier; pg3200.txt +earlier?" pg3200.txt +earlier?' pg3200.txt +earliest pg31100.txt, pg3200.txt, pg100.txt +earliest--else pg3200.txt +earls, pg100.txt +early pg31100.txt, pg3200.txt, pg100.txt +early, pg31100.txt, pg3200.txt, pg100.txt +early--still pg3200.txt +early--when pg3200.txt +early. pg31100.txt, pg3200.txt, pg100.txt +early." pg31100.txt, pg3200.txt +early.' pg100.txt +early; pg3200.txt, pg100.txt +early? pg100.txt +earmarks pg3200.txt +earn pg3200.txt, pg100.txt +earn. pg31100.txt, pg100.txt +earned pg31100.txt, pg3200.txt +earned, pg3200.txt +earned. pg3200.txt +earned." pg31100.txt +earnest pg31100.txt, pg3200.txt, pg100.txt +earnest, pg31100.txt, pg3200.txt +earnest. pg31100.txt, pg3200.txt, pg100.txt +earnest." pg3200.txt +earnest: pg3200.txt +earnest? pg3200.txt, pg100.txt +earnest?" pg3200.txt +earnest?' pg3200.txt +earnestly pg31100.txt, pg3200.txt, pg100.txt +earnestly, pg3200.txt +earnestly. pg3200.txt +earnestly: pg3200.txt +earnestly? pg100.txt +earnestness pg31100.txt, pg3200.txt +earnestness, pg31100.txt, pg3200.txt, pg100.txt +earnestness. pg31100.txt, pg3200.txt +earnestness: pg3200.txt +earnestness; pg31100.txt +earning pg3200.txt +earnings pg3200.txt +earnings. pg3200.txt +earns pg3200.txt +earring pg3200.txt +earring. pg3200.txt +earrings?" pg3200.txt +ears pg31100.txt, pg3200.txt, pg100.txt +ears! pg3200.txt, pg100.txt +ears!' pg3200.txt +ears, pg3200.txt, pg100.txt +ears--any pg3200.txt +ears. pg3200.txt, pg100.txt +ears." pg31100.txt +ears: pg3200.txt +ears; pg3200.txt, pg100.txt +ears? pg100.txt +ears] pg100.txt +earth pg3200.txt, pg100.txt +earth! pg3200.txt, pg100.txt +earth!" pg3200.txt +earth!--of pg31100.txt +earth" pg3200.txt +earth's." pg3200.txt +earth, pg31100.txt, pg3200.txt, pg100.txt +earth," pg3200.txt +earth--" pg3200.txt +earth--ah, pg3200.txt +earth--and pg3200.txt +earth--or pg3200.txt +earth-institutions pg3200.txt +earth. pg3200.txt, pg100.txt +earth." pg31100.txt, pg3200.txt +earth.' pg3200.txt +earth.-- pg3200.txt +earth: pg3200.txt +earth; pg3200.txt, pg100.txt +earth? pg3200.txt, pg100.txt +earth?" pg3200.txt +earthly pg3200.txt +earthly. pg3200.txt, pg100.txt +earthquake pg3200.txt +earthquake, pg3200.txt +earthquake," pg3200.txt +earthquake--reportorial pg3200.txt +earthquake. pg3200.txt +earthquake? pg3200.txt +earthquakes pg3200.txt +earthquakes. 
pg3200.txt +eas'd pg100.txt +ease pg31100.txt, pg3200.txt, pg100.txt +ease!" pg3200.txt +ease'! pg100.txt +ease'', pg100.txt +ease, pg31100.txt, pg3200.txt, pg100.txt +ease. pg31100.txt, pg3200.txt, pg100.txt +ease.' pg100.txt +ease; pg31100.txt, pg100.txt +eased pg3200.txt +eased-up, pg3200.txt +eased; pg100.txt +easeful pg3200.txt +eases, pg100.txt +easier pg31100.txt, pg3200.txt +easier, pg3200.txt +easier. pg31100.txt, pg3200.txt, pg100.txt +easier; pg3200.txt +easiest pg31100.txt +easily pg31100.txt, pg3200.txt +easily, pg3200.txt +easily--and pg3200.txt +easily. pg31100.txt, pg3200.txt, pg100.txt +easily." pg31100.txt +easiness pg31100.txt, pg100.txt +easiness. pg100.txt +easing pg3200.txt, pg100.txt +east pg31100.txt, pg3200.txt, pg100.txt +east!" pg3200.txt +east, pg3200.txt, pg100.txt +east," pg3200.txt +east,) pg3200.txt +east--this pg3200.txt +east-south-east pg3200.txt +east. pg3200.txt, pg100.txt +east." pg3200.txt +east; pg3200.txt, pg100.txt +east? pg31100.txt, pg3200.txt +east?" pg3200.txt +eastbourne pg31100.txt +eastcheap pg100.txt +eastcheap. pg100.txt +easter, pg31100.txt, pg3200.txt +easter-day, pg31100.txt +easter; pg31100.txt +eastern pg3200.txt +easting pg3200.txt +eastward pg3200.txt +eastward, pg3200.txt +eastward-bound pg3200.txt +easy pg31100.txt, pg3200.txt, pg100.txt +easy" pg3200.txt +easy, pg31100.txt, pg3200.txt +easy," pg3200.txt +easy--. pg31100.txt +easy-divorce pg3200.txt +easy-going pg3200.txt +easy-going, pg3200.txt +easy. pg31100.txt, pg3200.txt, pg100.txt +easy." pg31100.txt, pg3200.txt +easy; pg31100.txt, pg3200.txt, pg100.txt +easy? pg100.txt +easy?" pg3200.txt +eat! pg100.txt +eat" pg3200.txt +eat, pg3200.txt, pg100.txt +eat. pg31100.txt, pg3200.txt, pg100.txt +eat." pg31100.txt, pg3200.txt +eat; pg31100.txt, pg3200.txt, pg100.txt +eat;--and pg31100.txt +eat? pg31100.txt +eat?" pg3200.txt +eat?' pg3200.txt +eatable, pg31100.txt +eatable--hautboys pg31100.txt +eatables--monkeys--the pg3200.txt +eaten pg3200.txt, pg100.txt +eaten. pg3200.txt, pg100.txt +eaten; pg31100.txt +eating pg31100.txt, pg3200.txt +eating, pg31100.txt, pg3200.txt +eating-room pg3200.txt +eating-saloon. pg3200.txt +eating. pg100.txt +eats pg31100.txt, pg3200.txt +eats, pg3200.txt, pg100.txt +eats--so pg31100.txt +eaves pg3200.txt +eaves, pg3200.txt +eaves--when pg3200.txt +eaves-dropper, pg100.txt +eavesdropping pg3200.txt +eavesdropping:-- pg3200.txt +ebal pg3200.txt +ebb pg100.txt +ebb'd, pg100.txt +ebb, pg100.txt +ebb. pg100.txt +ebb? pg100.txt +ebbs, pg100.txt +eben pg3200.txt +ebony pg31100.txt +ebony. pg100.txt +ebony; pg100.txt +ebook's pg100.txt +ebooks, pg31100.txt, pg3200.txt, pg100.txt +ebooks. pg31100.txt, pg3200.txt, pg100.txt +ebooks: pg100.txt +eccentric, pg31100.txt, pg3200.txt +eccentric;--and pg31100.txt +eccentricities pg3200.txt +eccentricities, pg3200.txt +eccentricities. pg3200.txt +eccentrics, pg3200.txt +ecclesford pg31100.txt +ecclesford, pg31100.txt +ecclesiastic pg3200.txt +ecclesiastical pg3200.txt +ecclesiasticism, pg3200.txt +ecclesiastics pg3200.txt +echauffer, pg3200.txt +echo pg31100.txt, pg3200.txt +echo, pg3200.txt, pg100.txt +echo. pg100.txt +echoed pg31100.txt +echoed: pg3200.txt +echoes pg3200.txt +echoes, pg3200.txt +echoes. pg3200.txt +echoing pg3200.txt +eckert pg3200.txt +eckert. pg3200.txt +eclat pg3200.txt +eclat. pg31100.txt, pg3200.txt +eclips'd. pg100.txt +eclipse pg3200.txt, pg100.txt +eclipse, pg100.txt +eclipse. pg3200.txt, pg100.txt +eclipses. pg100.txt +economical pg3200.txt +economical. 
pg3200.txt +economies pg3200.txt +economies, pg3200.txt +economies. pg3200.txt +economize pg3200.txt +economize. pg3200.txt +economized pg3200.txt +economized. pg3200.txt +economy pg3200.txt +economy, pg31100.txt, pg3200.txt +economy. pg31100.txt, pg3200.txt +economy." pg3200.txt +ecstacy! pg3200.txt +ecstacy. pg3200.txt +ecstasies pg3200.txt, pg100.txt +ecstasies. pg3200.txt +ecstasy pg31100.txt, pg3200.txt, pg100.txt +ecstasy!" pg3200.txt +ecstasy!--' pg3200.txt +ecstasy, pg3200.txt, pg100.txt +ecstasy. pg31100.txt, pg3200.txt, pg100.txt +ecstasy." pg3200.txt +ecstasy; pg3200.txt, pg100.txt +ecstasy? pg100.txt +ecstatic pg3200.txt +ecus. pg100.txt +ed pg3200.txt +eddie. pg3200.txt +eddy pg3200.txt +eddy's pg3200.txt +eddy's. pg3200.txt +eddy), pg3200.txt +eddy, pg3200.txt +eddy. pg3200.txt +eddy." pg3200.txt +eddy.... pg3200.txt +eddyties pg3200.txt +edelweiss. pg3200.txt +eden, pg3200.txt +eden. pg3200.txt +edgar's pg31100.txt +edgar, pg100.txt +edgar- pg100.txt +edgar. pg100.txt +edgar.] pg100.txt +edgar? pg100.txt +edgar]. pg100.txt +edge pg31100.txt, pg3200.txt, pg100.txt +edge, pg3200.txt, pg100.txt +edge. pg3200.txt, pg100.txt +edge; pg100.txt +edge? pg100.txt +edged pg3200.txt, pg100.txt +edges pg3200.txt +edges! pg3200.txt +edges. pg3200.txt +edgeways pg3200.txt +edgeways, pg3200.txt +edict pg3200.txt +edict, pg100.txt +edicts pg3200.txt +edifice pg31100.txt, pg3200.txt +edifice, pg3200.txt +edifices pg3200.txt +edinburgh pg3200.txt +edinburgh, pg31100.txt, pg3200.txt +edinburgh. pg3200.txt +edinburgh: pg3200.txt +edison pg3200.txt +edison's; pg3200.txt +edisonially. pg3200.txt +edit pg3200.txt +edited pg3200.txt +edith pg3200.txt +edith, pg3200.txt +edith--" pg3200.txt +editing, pg3200.txt +edition pg3200.txt +edition. pg31100.txt, pg3200.txt, pg100.txt +edition." pg3200.txt +edition: pg3200.txt +edition] pg3200.txt +editions pg31100.txt, pg3200.txt, pg100.txt +editions. pg3200.txt +editor pg3200.txt +editor's pg3200.txt +editor, pg3200.txt +editor. pg3200.txt +editor: pg3200.txt +editor; pg3200.txt +editor?" pg3200.txt +editorial pg3200.txt +editorial, pg3200.txt +editorials pg3200.txt +editorials. pg3200.txt +editors pg3200.txt +edits pg3200.txt +edmund pg31100.txt, pg3200.txt, pg100.txt +edmund' pg31100.txt +edmund's pg31100.txt +edmund's; pg31100.txt +edmund, pg31100.txt, pg100.txt +edmund--by pg31100.txt +edmund. pg31100.txt, pg100.txt +edmund." pg31100.txt +edmund; pg31100.txt, pg100.txt +edmund? pg31100.txt, pg100.txt +edmund]. pg100.txt +edmunds pg3200.txt, pg100.txt +edmundsbury pg100.txt +edmundsbury; pg100.txt +educate pg31100.txt, pg3200.txt +educated pg31100.txt, pg3200.txt +educated, pg31100.txt +educated. pg3200.txt +educated?" pg3200.txt +educating pg3200.txt +education pg31100.txt, pg3200.txt, pg100.txt +education!" pg31100.txt +education, pg31100.txt, pg3200.txt +education. pg3200.txt +education." pg31100.txt +education: pg31100.txt, pg100.txt +education; pg31100.txt, pg3200.txt, pg100.txt +educational pg3200.txt +edward pg31100.txt, pg3200.txt, pg100.txt +edward! pg31100.txt, pg100.txt +edward!" pg31100.txt, pg3200.txt +edward!--but pg31100.txt +edward's pg31100.txt, pg100.txt +edward's, pg31100.txt, pg100.txt +edward's- pg100.txt +edward's. pg31100.txt, pg100.txt +edward's; pg100.txt +edward, pg31100.txt, pg3200.txt, pg100.txt +edward- pg100.txt +edward. pg31100.txt, pg3200.txt, pg100.txt +edward." pg31100.txt, pg3200.txt +edward._] pg31100.txt +edward; pg100.txt +edward? pg100.txt +edward?" 
pg31100.txt +edwards pg3200.txt +edwin pg3200.txt +edwitha pg3200.txt +edwitha, pg3200.txt +eel pg100.txt +eels pg100.txt +efface pg3200.txt +effect pg31100.txt, pg3200.txt, pg100.txt +effect! pg100.txt +effect, pg3200.txt, pg100.txt +effect- pg100.txt +effect-- pg3200.txt +effect--' pg3200.txt +effect--sixty-one pg3200.txt +effect--the pg3200.txt +effect. pg31100.txt, pg3200.txt, pg100.txt +effect." pg31100.txt, pg3200.txt +effect: pg31100.txt, pg3200.txt, pg100.txt +effect:-- pg31100.txt, pg3200.txt +effect; pg31100.txt, pg3200.txt, pg100.txt +effect? pg100.txt +effect?" pg3200.txt +effected pg100.txt +effected. pg3200.txt, pg100.txt +effected: pg100.txt +effected; pg100.txt +effecting pg31100.txt +effective pg3200.txt +effective. pg3200.txt +effectively pg3200.txt +effectively. pg3200.txt +effects pg31100.txt, pg3200.txt, pg100.txt +effects, pg31100.txt, pg3200.txt, pg100.txt +effects,' pg3200.txt +effects--and pg3200.txt +effects. pg31100.txt, pg3200.txt, pg100.txt +effects; pg3200.txt +effectual pg3200.txt +effectual, pg100.txt +effectual. pg100.txt +effectual; pg100.txt +effectually pg31100.txt, pg3200.txt +effectually, pg3200.txt +effeminate pg3200.txt, pg100.txt +effeminate, pg3200.txt, pg100.txt +effete." pg3200.txt +efficacious pg3200.txt +efficacy pg31100.txt, pg3200.txt +efficiency--and pg3200.txt +efficiency. pg3200.txt +efficient pg3200.txt +efficiently pg3200.txt +effigy pg3200.txt +efflorescence pg3200.txt +effluvia pg3200.txt +effort pg31100.txt, pg3200.txt +effort, pg31100.txt, pg3200.txt +effort---- pg3200.txt +effort. pg31100.txt, pg3200.txt +effort." pg31100.txt +effort: pg3200.txt +effort; pg3200.txt +effort? pg3200.txt +efforts pg3200.txt +efforts, pg31100.txt, pg3200.txt +efforts. pg3200.txt +efforts." pg3200.txt +efforts?" pg3200.txt +effrontery pg3200.txt +effulgence pg3200.txt +effus'd, pg100.txt +effusion pg3200.txt +effusion, pg3200.txt +effusion-- pg3200.txt +effusion. pg3200.txt +effusion: pg3200.txt +effusions pg31100.txt +eftsoons." pg3200.txt +egg pg31100.txt, pg3200.txt, pg100.txt +egg! pg100.txt +egg"; pg3200.txt +egg, pg3200.txt, pg100.txt +egg-shells pg31100.txt +egg-shells, pg3200.txt +egg. pg3200.txt, pg100.txt +egglame pg3200.txt +eggs pg3200.txt, pg100.txt +eggs, pg3200.txt, pg100.txt +eggs. pg3200.txt +eggs." pg3200.txt +eggs; pg3200.txt +eggs?" pg3200.txt +eglamour pg100.txt +eglamour, pg100.txt +eglamour? pg100.txt +eglantine; pg100.txt +egnog. pg3200.txt +egotism, pg3200.txt +egotistic pg3200.txt +egotistical. pg3200.txt +egregious. pg31100.txt +egress pg3200.txt +egress. pg3200.txt +egypt pg3200.txt, pg100.txt +egypt's pg3200.txt +egypt, pg3200.txt, pg100.txt +egypt- pg100.txt +egypt--villages pg3200.txt +egypt. pg3200.txt, pg100.txt +egypt." pg3200.txt +egypt; pg3200.txt, pg100.txt +egypt? pg100.txt +egypta!" pg3200.txt +egyptian pg3200.txt, pg100.txt +egyptian! pg100.txt +egyptians pg100.txt +egyptians. pg3200.txt +eh?" pg3200.txt +eie, pg100.txt +eight pg31100.txt, pg3200.txt +eight, pg3200.txt +eight-and-thirty, pg31100.txt +eight-and-thirty. pg31100.txt +eight. pg3200.txt, pg100.txt +eight." pg3200.txt +eight.) pg3200.txt +eight? pg100.txt +eight?" pg3200.txt +eighteen pg31100.txt, pg3200.txt +eighteen, pg3200.txt +eighteen-foot pg3200.txt +eighteen. pg31100.txt, pg3200.txt +eighteen." pg3200.txt +eighteens. pg3200.txt +eighteenth pg3200.txt +eighth pg3200.txt, pg100.txt +eighties pg3200.txt +eightpence!" pg3200.txt +eightpence." pg3200.txt +eighty pg3200.txt +eighty- pg3200.txt +eighty-five. 
pg3200.txt +eighty-four pg3200.txt +eighty-one pg3200.txt +eighty-six pg3200.txt +eighty-two pg3200.txt +eighty. pg3200.txt +ein pg3200.txt +einem pg3200.txt +einen pg3200.txt +einer pg3200.txt +eingebusst pg3200.txt +einmal! pg3200.txt +either pg31100.txt, pg3200.txt, pg100.txt +either!" pg31100.txt +either, pg31100.txt, pg3200.txt, pg100.txt +either. pg31100.txt, pg3200.txt, pg100.txt +either." pg31100.txt, pg3200.txt +either.' pg3200.txt +either; pg31100.txt, pg3200.txt +either? pg3200.txt, pg100.txt +ejaculated-- pg3200.txt +ejaculated: pg3200.txt +ejaculation pg3200.txt +ejaculation-- pg3200.txt +ejaculation. pg31100.txt +ejaculation: pg3200.txt +ejaculations pg3200.txt +ejaculations--it pg3200.txt +ejaculations; pg3200.txt +eke pg100.txt +elaborate pg3200.txt +elaborate, pg3200.txt +elaborated; pg3200.txt +elaborately pg3200.txt +elaborately. pg3200.txt +elaboration. pg3200.txt +elapse pg3200.txt +elapsed pg31100.txt, pg3200.txt +elapsed, pg3200.txt +elapsed. pg3200.txt +elastic pg3200.txt +elastic, pg3200.txt +elasticity pg31100.txt +elated pg31100.txt +elba pg3200.txt +elba. pg3200.txt +elbe; pg100.txt +elbow pg3200.txt +elbow, pg31100.txt, pg3200.txt, pg100.txt +elbow--checkmate. pg3200.txt +elbow-room; pg100.txt +elbow-to-elbow pg3200.txt +elbow. pg3200.txt, pg100.txt +elbow: pg3200.txt +elbow? pg3200.txt, pg100.txt +elbow] pg100.txt +elbowed pg3200.txt +elbows pg3200.txt +elbows, pg3200.txt +eld pg100.txt +eld, pg100.txt +elder pg31100.txt, pg3200.txt, pg100.txt +elder! pg3200.txt +elder, pg3200.txt +elder- pg100.txt +elder-tree pg100.txt +elder-tree. pg100.txt +elder. pg3200.txt, pg100.txt +elderly pg3200.txt +elderly, pg3200.txt +elders pg3200.txt +elders. pg3200.txt, pg100.txt +elders; pg3200.txt +eldest pg31100.txt, pg3200.txt +eldest, pg31100.txt +eleanor pg31100.txt, pg100.txt +eleanor! pg100.txt +eleanor!" pg3200.txt +eleanor's pg31100.txt +eleanor, pg31100.txt, pg100.txt +eleanor--what pg31100.txt +eleanor. pg100.txt +eleanor." pg31100.txt +elect pg3200.txt +elect, pg3200.txt, pg100.txt +elect. pg31100.txt +elected pg3200.txt +elected, pg100.txt +elected. pg3200.txt +elected." pg3200.txt +elected; pg3200.txt +elected? pg3200.txt +electing? pg3200.txt +election pg3200.txt, pg100.txt +election, pg3200.txt, pg100.txt +election. pg3200.txt, pg100.txt +election; pg100.txt +election? pg3200.txt +elections. pg3200.txt +elective pg3200.txt +electric pg3200.txt +electrical pg3200.txt +electricity pg3200.txt +electricity. pg3200.txt +electrics--and pg3200.txt +electrified pg3200.txt +electronic pg31100.txt, pg3200.txt, pg100.txt +electronically. pg100.txt +electropaths, pg3200.txt +elects pg3200.txt +elegance pg31100.txt, pg3200.txt +elegance, pg31100.txt, pg3200.txt +elegance. pg31100.txt +elegance.-- pg31100.txt +elegancies pg31100.txt +elegant pg31100.txt, pg3200.txt +elegant! pg3200.txt +elegant, pg31100.txt +elegant. pg3200.txt +elegant." pg31100.txt +elegantly pg3200.txt +elegantly, pg3200.txt +elegies, pg100.txt +element pg3200.txt, pg100.txt +element. pg3200.txt +element?" pg3200.txt +elementals pg3200.txt +elementals, pg3200.txt +elements pg3200.txt, pg100.txt +elements, pg100.txt +elements. pg3200.txt, pg100.txt +elements; pg100.txt +elements? pg100.txt +elephant pg3200.txt +elephant!" pg3200.txt +elephant's pg3200.txt +elephant's, pg3200.txt +elephant, pg3200.txt, pg100.txt +elephant-driver pg3200.txt +elephant. pg3200.txt, pg100.txt +elephant." pg3200.txt +elephant; pg3200.txt, pg100.txt +elephant? pg3200.txt +elephant?" 
pg3200.txt +elephantiasis pg3200.txt +elephants pg3200.txt +elephants, pg3200.txt +elephants. pg3200.txt +elevate pg31100.txt, pg3200.txt +elevated pg31100.txt, pg3200.txt +elevating pg3200.txt +elevating. pg31100.txt +elevation pg31100.txt, pg3200.txt +elevation. pg3200.txt +elevation; pg31100.txt +elevators pg3200.txt +elevators. pg3200.txt +eleven pg31100.txt, pg3200.txt +eleven, pg3200.txt +eleven. pg3200.txt, pg100.txt +eleven." pg3200.txt +eleven; pg100.txt +eleven? pg100.txt +eleven?--a pg3200.txt +elexander pg3200.txt +elexandria pg3200.txt +elfonzo pg3200.txt +elfonzo, pg3200.txt +elfonzo," pg3200.txt +elfonzo-- pg3200.txt +elfonzo. pg3200.txt +elfonzo: pg3200.txt +elfonzo; pg3200.txt +eli pg3200.txt +elicited, pg3200.txt +eligibility pg31100.txt +eligibility, pg31100.txt +eligible pg3200.txt +eligible, pg31100.txt +eligible." pg31100.txt, pg3200.txt +elinor pg31100.txt +elinor's pg31100.txt +elinor's, pg31100.txt +elinor's. pg31100.txt +elinor, pg31100.txt +elinor-- pg31100.txt +elinor--she pg31100.txt +elinor. pg31100.txt +elinor; pg31100.txt +elinor? pg31100.txt +elinor?" pg31100.txt +elisha, pg3200.txt +elisha. pg3200.txt +eliza pg31100.txt, pg3200.txt +eliza, pg31100.txt +eliza," pg31100.txt +eliza. pg31100.txt +eliza." pg31100.txt +elizabeth pg31100.txt, pg3200.txt, pg100.txt +elizabeth! pg100.txt +elizabeth's pg31100.txt, pg3200.txt +elizabeth, pg31100.txt, pg3200.txt, pg100.txt +elizabeth-- pg31100.txt +elizabeth. pg31100.txt, pg3200.txt, pg100.txt +elizabeth." pg31100.txt, pg3200.txt +elizabeth: pg31100.txt +elizabeth; pg3200.txt +elizabethan pg3200.txt +ell, pg3200.txt +ell. pg100.txt +ella pg3200.txt +ellen? pg100.txt +elliot pg31100.txt +elliot! pg31100.txt +elliot' pg31100.txt +elliot's pg31100.txt +elliot, pg31100.txt +elliot," pg31100.txt +elliot. pg31100.txt +elliot." pg31100.txt +elliot; pg31100.txt +elliot? pg31100.txt +elliot?" pg31100.txt +elliots." pg31100.txt +elliott, pg31100.txt +ellis, pg3200.txt +ellison pg31100.txt +ellisons?" pg31100.txt +elm. pg100.txt +elmira pg3200.txt +elmira). pg3200.txt +elmira, pg3200.txt +elmira,) pg3200.txt +elmira. pg3200.txt +elmira: pg3200.txt +elms pg31100.txt +elms. pg31100.txt +eloisa pg31100.txt +eloisa. pg31100.txt +elongated pg3200.txt +elope pg3200.txt +eloped pg31100.txt +elopement pg31100.txt +elopement, pg31100.txt, pg3200.txt +elopement; pg3200.txt +elopes, pg3200.txt +eloquence pg31100.txt, pg3200.txt, pg100.txt +eloquence, pg3200.txt, pg100.txt +eloquence. pg31100.txt, pg3200.txt, pg100.txt +eloquence." pg31100.txt +eloquence; pg3200.txt +eloquent pg31100.txt, pg3200.txt +eloquent, pg3200.txt +else pg31100.txt, pg3200.txt, pg100.txt +else! pg100.txt +else!" pg3200.txt +else!-of pg100.txt +else's pg3200.txt +else's, pg3200.txt +else's--and pg3200.txt +else's. pg3200.txt +else's?" pg3200.txt +else, pg31100.txt, pg3200.txt, pg100.txt +else- pg100.txt +else--" pg31100.txt +else--and pg31100.txt +else--except pg3200.txt +else--jumbo pg3200.txt +else--like pg3200.txt +else--that pg31100.txt +else--these pg3200.txt +else. pg31100.txt, pg3200.txt, pg100.txt +else." pg31100.txt, pg3200.txt +else.' pg31100.txt +else.--and pg3200.txt +else; pg31100.txt, pg3200.txt +else? pg3200.txt, pg100.txt +else?" pg31100.txt, pg3200.txt +else?' pg3200.txt +elsewhere pg31100.txt, pg3200.txt +elsewhere, pg3200.txt, pg100.txt +elsewhere. pg31100.txt, pg3200.txt, pg100.txt +elsewhere." pg31100.txt +elsewhere: pg3200.txt +elsewhere; pg3200.txt +elsewhere?" pg31100.txt +elsinore. pg100.txt +elsinore? pg100.txt +elton pg31100.txt +elton! 
pg31100.txt +elton!" pg31100.txt +elton's pg31100.txt +elton, pg31100.txt +elton-- pg31100.txt +elton. pg31100.txt +elton." pg31100.txt +elton; pg31100.txt +elton?" pg31100.txt +elton?--that pg31100.txt +eltons pg31100.txt +eltons! pg31100.txt +eltons'; pg31100.txt +eltons, pg31100.txt +eltons. pg31100.txt +elucidations pg3200.txt +elude; pg31100.txt +elusive pg3200.txt +elusive; pg3200.txt +ely pg100.txt +ely, pg100.txt +elysium pg100.txt +elysium. pg100.txt +em!" pg3200.txt +emaciated pg3200.txt +email pg31100.txt, pg3200.txt, pg100.txt +emanates pg3200.txt +emancipated pg3200.txt +emancipation pg3200.txt +embalm pg3200.txt +embalmed pg3200.txt +embamming.' pg3200.txt +embankment pg3200.txt +embankment; pg3200.txt +embark pg3200.txt +embark'd pg100.txt +embark'd? pg100.txt +embarkation pg3200.txt +embarrass pg3200.txt +embarrassed pg3200.txt +embarrassed, pg3200.txt +embarrassed. pg31100.txt, pg3200.txt +embarrassed." pg31100.txt +embarrassed.'" pg3200.txt +embarrassed.--he pg31100.txt +embarrassed? pg31100.txt +embarrassing pg31100.txt, pg3200.txt +embarrassing. pg3200.txt +embarrassment pg31100.txt, pg3200.txt +embarrassment, pg31100.txt, pg3200.txt +embarrassment. pg3200.txt +embarrassment." pg3200.txt +embarrassments pg31100.txt +embarrassments, pg3200.txt +embassade, pg100.txt +embassage pg100.txt +embassage. pg100.txt +embassage; pg100.txt +embassy pg3200.txt, pg100.txt +embassy, pg100.txt +embassy. pg100.txt +embassy: pg100.txt +embassy; pg100.txt +embassy? pg100.txt +embattle pg100.txt +embellish pg3200.txt +embellished pg3200.txt, pg100.txt +embellishment: pg3200.txt +embellishments pg31100.txt +embellishments, pg3200.txt +embers, pg3200.txt +embers; pg3200.txt +embittered pg3200.txt +embittering pg3200.txt +emblem pg3200.txt, pg100.txt +emblems pg100.txt +embodied, pg3200.txt +embodied. pg31100.txt +embodiment, pg3200.txt +embodying pg3200.txt +emboldened pg3200.txt +emboss'd. pg100.txt +emboss'd; pg100.txt +embrac'd pg100.txt +embrac'd, pg100.txt +embrac'd. pg100.txt +embrace pg3200.txt, pg100.txt +embrace! pg100.txt +embrace, pg3200.txt, pg100.txt +embrace. pg3200.txt, pg100.txt +embrace." pg3200.txt +embrace; pg100.txt +embrace? pg100.txt +embrace] pg100.txt +embraced pg3200.txt +embracer."] pg3200.txt +embraces pg31100.txt, pg100.txt +embraces, pg100.txt +embraces. pg100.txt +embracing pg31100.txt, pg3200.txt, pg100.txt +embrocation pg31100.txt, pg3200.txt +embrocation, pg31100.txt +embroidered pg3200.txt +embroidering. pg3200.txt +embroidering; pg3200.txt +embroidery, pg100.txt +emeralds--emeralds pg3200.txt +emerge pg3200.txt +emerged pg3200.txt +emergence pg31100.txt +emergence. pg31100.txt +emergencies pg3200.txt +emergency pg3200.txt +emergency, pg31100.txt, pg3200.txt +emergency. pg3200.txt +emergency; pg3200.txt +emerging pg3200.txt +emeritus pg3200.txt +emeritus" pg3200.txt +emeritus, pg3200.txt +emeritus. pg3200.txt +emeritus." pg3200.txt +emeritus; pg3200.txt +emerson pg3200.txt +emerson, pg3200.txt +emigrant, pg31100.txt +emigrant." pg31100.txt +emigrants pg3200.txt +emigrate. pg3200.txt +emigrating pg3200.txt +emilia pg100.txt +emilia, pg100.txt +emilia- pg100.txt +emilia. pg100.txt +emilia; pg100.txt +emilia? pg100.txt +emily pg3200.txt +eminence pg31100.txt, pg3200.txt +eminence, pg3200.txt, pg100.txt +eminent! pg100.txt +emits pg3200.txt +emma pg31100.txt, pg3200.txt +emma's pg31100.txt +emma's. pg31100.txt +emma, pg31100.txt +emma--" pg31100.txt +emma--what pg31100.txt +emma. pg31100.txt +emma." pg31100.txt +emma."-- pg31100.txt +emma; pg31100.txt +emmanuel. 
pg100.txt +emmas pg31100.txt +emmeline pg31100.txt, pg3200.txt +emmeline's pg3200.txt +emmeline, pg3200.txt +emnity! pg100.txt +emnity, pg100.txt +emoluments. pg3200.txt +emoluments; pg3200.txt +emotion pg31100.txt, pg3200.txt +emotion, pg31100.txt, pg3200.txt +emotion-- pg3200.txt +emotion. pg31100.txt, pg3200.txt +emotion." pg3200.txt +emotion: pg3200.txt +emotion; pg31100.txt, pg3200.txt +emotional pg3200.txt +emotional. pg3200.txt +emotional? pg3200.txt +emotions pg31100.txt, pg3200.txt +emotions, pg31100.txt +emotions. pg3200.txt +empaneled pg3200.txt +empaneling pg3200.txt +emperess pg100.txt +emperess, pg100.txt +emperess. pg100.txt +emperial. pg100.txt +emperor pg3200.txt, pg100.txt +emperor! pg3200.txt, pg100.txt +emperor!' pg100.txt +emperor's pg3200.txt, pg100.txt +emperor's. pg3200.txt +emperor, pg3200.txt, pg100.txt +emperor--a pg3200.txt +emperor--but pg3200.txt +emperor-elect. pg3200.txt +emperor. pg3200.txt, pg100.txt +emperor: pg3200.txt +emperor; pg100.txt +emperor?" pg3200.txt +emperors pg3200.txt +emperors, pg3200.txt +emperors--but pg3200.txt +empery pg100.txt +empery, pg100.txt +empery. pg100.txt +empfehlen. pg3200.txt +emphasis pg3200.txt +emphasis! pg100.txt +emphasis, pg31100.txt, pg3200.txt +emphasis. pg3200.txt +emphasize pg3200.txt +emphasized pg3200.txt +emphasized, pg3200.txt +emphasizes pg3200.txt +emphatically pg31100.txt +empire pg3200.txt, pg100.txt +empire! pg100.txt +empire!" pg3200.txt +empire!"] pg3200.txt +empire's pg3200.txt +empire, pg3200.txt, pg100.txt +empire--officials pg3200.txt +empire. pg3200.txt, pg100.txt +empire." pg3200.txt +empire; pg3200.txt +empires pg3200.txt +empires, pg3200.txt +empires; pg3200.txt +empleached, pg100.txt +employ pg3200.txt, pg100.txt +employ'd pg100.txt +employ'd, pg100.txt +employ'd. pg100.txt +employ'd? pg100.txt +employ, pg100.txt +employ. pg3200.txt +employed pg31100.txt, pg3200.txt +employed"-- pg31100.txt +employed, pg31100.txt +employed. pg31100.txt, pg3200.txt +employee. pg3200.txt +employees pg3200.txt +employees. pg3200.txt +employer pg3200.txt +employers pg3200.txt +employment pg31100.txt, pg3200.txt, pg100.txt +employment! pg100.txt +employment, pg31100.txt, pg3200.txt, pg100.txt +employment. pg31100.txt, pg3200.txt, pg100.txt +employment." pg31100.txt +employment: pg3200.txt +employment; pg31100.txt, pg100.txt +employment] pg100.txt +employments pg31100.txt, pg3200.txt, pg100.txt +employments, pg31100.txt, pg3200.txt +employments. pg31100.txt +empoison'd, pg100.txt +emportait pg3200.txt +empress pg3200.txt, pg100.txt +empress! pg100.txt +empress, pg3200.txt +empress- pg100.txt +empress-queen: pg3200.txt +empress. pg100.txt +empress? pg100.txt +empress] pg100.txt +emptied pg3200.txt +emptied. pg3200.txt +emptier pg100.txt +emptiness pg31100.txt, pg3200.txt +emptiness, pg100.txt +emptiness--there pg3200.txt +emptiness. pg3200.txt, pg100.txt +empty pg31100.txt, pg3200.txt, pg100.txt +empty, pg3200.txt, pg100.txt +empty----" pg3200.txt +empty-handed, pg3200.txt +empty. pg31100.txt, pg3200.txt, pg100.txt +empty: pg3200.txt +empty; pg3200.txt, pg100.txt +empty? pg100.txt +emptying pg3200.txt +ems pg3200.txt +emu, pg3200.txt +emulating pg31100.txt +emulation pg3200.txt, pg100.txt +emulation. pg3200.txt, pg100.txt +en--" pg3200.txt +enable pg31100.txt, pg3200.txt +enabled pg3200.txt +enables pg3200.txt +enabling pg3200.txt +enact pg100.txt +enact? pg100.txt +enamel pg3200.txt +enamelled pg100.txt +enamoured pg3200.txt, pg100.txt +encamp'd, pg100.txt +encamp'd? 
pg100.txt +encamped pg3200.txt +encampment pg3200.txt +enceladus, pg100.txt +encephalic pg3200.txt +enchant pg3200.txt +enchant; pg100.txt +enchanted pg3200.txt +enchanted, pg3200.txt, pg100.txt +enchanted." pg3200.txt +enchanter. pg3200.txt +enchanters pg3200.txt +enchanting pg3200.txt +enchanting! pg3200.txt +enchanting, pg3200.txt +enchanting. pg3200.txt +enchantingly pg100.txt +enchantment pg3200.txt +enchantment, pg100.txt +enchantment--where pg3200.txt +enchantment. pg3200.txt +enchantment." pg3200.txt +enchantment; pg3200.txt +enchantments pg3200.txt +enchantments, pg3200.txt +enchantments. pg3200.txt +enchantress pg3200.txt +encircled pg3200.txt +enclose pg3200.txt +enclose! pg100.txt +enclosed pg3200.txt +enclosed) pg3200.txt +enclosed. pg3200.txt, pg100.txt +encloses. pg100.txt +enclosing pg3200.txt, pg100.txt +enclosure, pg3200.txt +enclosure--5. pg3200.txt +enclosure. pg3200.txt +enclosure: pg3200.txt +enclosures, pg31100.txt +enclouded, pg100.txt +encomiums pg3200.txt +encore pg3200.txt +encore!" pg3200.txt +encore-- pg3200.txt +encored pg3200.txt +encoring pg3200.txt +encount'red pg100.txt +encount'red. pg100.txt +encount'ring pg100.txt +encount'ring, pg100.txt +encounter pg31100.txt, pg3200.txt, pg100.txt +encounter'd! pg100.txt +encounter, pg100.txt +encounter- pg100.txt +encounter. pg100.txt +encounter; pg100.txt +encountered pg3200.txt, pg100.txt +encountered, pg3200.txt +encountered. pg3200.txt +encounters pg3200.txt, pg100.txt +encourage pg31100.txt, pg3200.txt +encouraged pg31100.txt, pg3200.txt +encouraged!! pg100.txt +encouraged. pg3200.txt +encouraged; pg31100.txt, pg100.txt +encouragement pg31100.txt, pg3200.txt +encouragement, pg31100.txt, pg3200.txt +encouragement. pg31100.txt, pg3200.txt, pg100.txt +encouragement." pg31100.txt +encouragement; pg31100.txt +encourager, pg31100.txt +encourages pg3200.txt +encouraging pg31100.txt, pg3200.txt +encouraging, pg31100.txt +encouraging. pg31100.txt, pg3200.txt +encouragingly: pg3200.txt +encreased pg31100.txt +encroach!" pg3200.txt +encroaching pg31100.txt +encroachments pg3200.txt +encroachments. pg3200.txt +encumbered pg3200.txt +encumbrance pg31100.txt +encumbrance." pg31100.txt +encumbrance? pg3200.txt +encyclopaedic pg3200.txt +encyclopedia). pg3200.txt +end pg31100.txt, pg3200.txt, pg100.txt +end! pg3200.txt, pg100.txt +end!" pg31100.txt +end) pg100.txt +end, pg31100.txt, pg3200.txt, pg100.txt +end- pg100.txt +end--but pg31100.txt +end-edges pg3200.txt +end. pg31100.txt, pg3200.txt, pg100.txt +end." pg31100.txt, pg3200.txt +end.' pg100.txt +end.] pg3200.txt +end: pg100.txt +end; pg3200.txt, pg100.txt +end? pg3200.txt, pg100.txt +end?" pg31100.txt, pg3200.txt +end?' pg3200.txt +endamagement; pg100.txt +endanger pg3200.txt, pg100.txt +endangered pg3200.txt +endangered. pg3200.txt +endangering pg31100.txt, pg3200.txt +endear'd- pg100.txt +endearing pg3200.txt +endearments pg3200.txt +endeavor pg31100.txt, pg3200.txt +endeavor, pg3200.txt +endeavor--viz: pg3200.txt +endeavoring pg3200.txt +endeavors, pg3200.txt +endeavour pg31100.txt, pg3200.txt, pg100.txt +endeavoured pg31100.txt +endeavoured, pg31100.txt +endeavouring pg31100.txt +endeavouring, pg31100.txt +endeavours pg31100.txt +endeavours, pg100.txt +endeavours; pg100.txt +ended pg31100.txt, pg3200.txt, pg100.txt +ended! pg3200.txt +ended, pg31100.txt, pg3200.txt, pg100.txt +ended--character pg3200.txt +ended. pg3200.txt, pg100.txt +ended." 
pg3200.txt +ended; pg31100.txt, pg3200.txt, pg100.txt +ender; pg100.txt +enderby pg3200.txt +ending pg3200.txt +ending--then pg3200.txt +ending. pg3200.txt +endings pg100.txt +endless pg31100.txt, pg3200.txt +endless. pg31100.txt +endlong pg3200.txt +endorse pg3200.txt +endorsed pg3200.txt +endorsement--toll-gates pg3200.txt +endow'd, pg100.txt +endow'd. pg100.txt +endowed pg3200.txt, pg100.txt +endowment; pg3200.txt +endowments, pg31100.txt +ends pg31100.txt, pg3200.txt, pg100.txt +ends! pg100.txt +ends, pg3200.txt, pg100.txt +ends. pg3200.txt, pg100.txt +ends; pg100.txt +endur'd! pg100.txt +endur'd. pg100.txt +endur'd; pg100.txt +endur'st, pg100.txt +endurable, pg3200.txt +endurable." pg3200.txt +endurance pg3200.txt +endurance, pg3200.txt +endurances. pg31100.txt +endure pg31100.txt, pg3200.txt, pg100.txt +endure, pg100.txt +endure. pg31100.txt, pg3200.txt, pg100.txt +endure; pg31100.txt +endured pg31100.txt, pg3200.txt +endured, pg100.txt +endured. pg3200.txt, pg100.txt +endured? pg3200.txt +enduring pg31100.txt, pg3200.txt +enduring. pg100.txt +endways. pg3200.txt +endymion, pg31100.txt, pg100.txt +enemies pg31100.txt, pg3200.txt, pg100.txt +enemies! pg100.txt +enemies, pg3200.txt, pg100.txt +enemies. pg3200.txt, pg100.txt +enemies." pg3200.txt +enemies; pg100.txt +enemies? pg100.txt +enemies?" pg31100.txt +enemy pg31100.txt, pg3200.txt, pg100.txt +enemy! pg3200.txt +enemy' pg100.txt +enemy's! pg100.txt +enemy, pg31100.txt, pg3200.txt, pg100.txt +enemy--but pg3200.txt +enemy. pg3200.txt, pg100.txt +enemy... pg3200.txt +enemy: pg3200.txt +enemy; pg3200.txt, pg100.txt +enemy? pg100.txt +energetic pg3200.txt +energetic. pg31100.txt +energetic." pg31100.txt +energies pg3200.txt +energies, pg3200.txt +energies. pg3200.txt +energies; pg3200.txt +energize pg3200.txt +energy pg31100.txt, pg3200.txt +energy, pg31100.txt, pg3200.txt +energy. pg31100.txt, pg3200.txt +energy: pg3200.txt +energyless. pg3200.txt +enew pg100.txt +enfeeble pg3200.txt +enfeebled, pg3200.txt +enfeebled. pg100.txt +enfeebled; pg100.txt +enflee. pg3200.txt +enforc'd pg100.txt +enforc'd, pg100.txt +enforce pg100.txt +enforce, pg100.txt +enforce. pg100.txt +enforced pg31100.txt, pg100.txt +enforcement pg100.txt +enforces. pg100.txt +enforceth pg3200.txt +enfranchis'd pg100.txt +enfranchisement!" pg100.txt +enfranchisement, pg100.txt +enfranchisement. pg100.txt +eng pg3200.txt +eng. pg3200.txt +eng.: pg3200.txt +engag'd pg100.txt +engage pg31100.txt, pg3200.txt +engaged pg31100.txt, pg3200.txt, pg100.txt +engaged!" pg31100.txt +engaged, pg31100.txt +engaged. pg31100.txt, pg3200.txt +engaged." pg31100.txt, pg3200.txt +engaged; pg31100.txt +engaged?" pg3200.txt +engagement pg31100.txt, pg3200.txt +engagement! pg3200.txt +engagement!" pg31100.txt +engagement, pg31100.txt, pg3200.txt +engagement--" pg31100.txt +engagement. pg31100.txt, pg3200.txt +engagement." pg31100.txt, pg3200.txt +engagement; pg31100.txt +engagement? pg31100.txt +engagement?" pg31100.txt +engagement?' pg3200.txt +engagements pg31100.txt, pg3200.txt +engagements! pg3200.txt +engagements, pg31100.txt, pg3200.txt +engagements. pg31100.txt, pg3200.txt +engages pg3200.txt +engaging pg3200.txt +engaging, pg31100.txt, pg3200.txt +engaging--not pg31100.txt +engaging; pg31100.txt +engel! pg3200.txt +engend'red; pg100.txt +engendered pg3200.txt +engine pg3200.txt, pg100.txt +engine, pg100.txt +engine. pg3200.txt +engine; pg3200.txt +engineer pg3200.txt +engineer, pg3200.txt +engineer-- pg3200.txt +engineer. pg3200.txt +engineer." 
pg3200.txt +engineered pg3200.txt +engineering, pg3200.txt +engineering; pg3200.txt +engineers pg3200.txt +engineers, pg3200.txt +engineers. pg3200.txt +engineers." pg3200.txt +engineers; pg3200.txt +enginer pg100.txt +engines pg3200.txt +engines, pg3200.txt +engines. pg3200.txt +englan'!" pg3200.txt +england pg31100.txt, pg3200.txt, pg100.txt +england! pg31100.txt, pg3200.txt, pg100.txt +england!" pg3200.txt +england!'" pg3200.txt +england's pg3200.txt +england, pg31100.txt, pg3200.txt, pg100.txt +england," pg3200.txt +england- pg100.txt +england--and pg3200.txt +england--as pg3200.txt +england--know pg3200.txt +england--old pg3200.txt +england. pg31100.txt, pg3200.txt, pg100.txt +england." pg31100.txt, pg3200.txt +england."' pg100.txt +england.' pg3200.txt +england: pg3200.txt +england; pg31100.txt, pg3200.txt, pg100.txt +england? pg100.txt +england?" pg3200.txt +england_ pg3200.txt +englander, pg3200.txt +englands. pg3200.txt +english pg31100.txt, pg3200.txt, pg100.txt +english! pg3200.txt +english!" pg3200.txt +english" pg3200.txt +english, pg31100.txt, pg3200.txt, pg100.txt +english--and pg3200.txt +english--the pg3200.txt +english-hearted pg3200.txt +english-writing pg3200.txt +english. pg31100.txt, pg3200.txt, pg100.txt +english." pg3200.txt +english: pg3200.txt +english; pg3200.txt +english? pg100.txt +english?" pg3200.txt +englishman pg3200.txt +englishman's pg3200.txt +englishman's.' pg100.txt +englishman, pg3200.txt +englishman- pg100.txt +englishman--one pg3200.txt +englishman. pg3200.txt, pg100.txt +englishman." pg3200.txt +englishman; pg3200.txt +englishman? pg100.txt +englishmen pg3200.txt +englishmen, pg3200.txt, pg100.txt +englishmen. pg3200.txt +englishmen; pg100.txt +englishwoman pg3200.txt +engraffed pg100.txt +engrafted pg31100.txt +engrav'd, pg100.txt +engrave pg100.txt +engraved pg3200.txt +engraver pg3200.txt +engraving pg3200.txt +engravings pg3200.txt +engravings) pg3200.txt +engross pg31100.txt +engross'd pg100.txt +engrossed pg31100.txt +engrossed, pg100.txt +engulf pg3200.txt +enhance pg31100.txt, pg3200.txt +enhance. pg3200.txt +enigma pg3200.txt +enigma, pg3200.txt +enigma? pg100.txt +enigmas pg3200.txt +enigmas." pg3200.txt +enigmatical; pg100.txt +enjoin'd pg100.txt +enjoined pg3200.txt +enjoined. pg3200.txt +enjoy pg31100.txt, pg3200.txt, pg100.txt +enjoy! pg100.txt +enjoy'd, pg100.txt +enjoy, pg3200.txt, pg100.txt +enjoy. pg3200.txt, pg100.txt +enjoy." pg31100.txt, pg3200.txt +enjoy; pg100.txt +enjoy? pg100.txt +enjoyable pg3200.txt +enjoyable, pg3200.txt +enjoyed pg31100.txt, pg3200.txt +enjoyed, pg100.txt +enjoyed. pg31100.txt, pg100.txt +enjoying pg31100.txt, pg3200.txt +enjoying, pg3200.txt +enjoying. pg3200.txt, pg100.txt +enjoyment pg31100.txt, pg3200.txt +enjoyment, pg31100.txt +enjoyment. pg31100.txt, pg3200.txt +enjoyment." pg31100.txt +enjoyment.' pg3200.txt +enjoyment; pg31100.txt, pg3200.txt +enjoyments pg31100.txt, pg3200.txt +enjoyments, pg31100.txt +enjoys pg3200.txt +enjoys, pg100.txt +enjoys. pg100.txt +enjoys; pg100.txt +enkindled pg100.txt +enlarg'd pg100.txt +enlarge pg31100.txt, pg3200.txt +enlarge. pg3200.txt +enlarged pg3200.txt +enlarged, pg100.txt +enlargement pg3200.txt, pg100.txt +enlargement, pg3200.txt +enlargements, pg3200.txt +enlarging pg3200.txt +enlighten pg3200.txt +enlightened pg31100.txt +enlightened; pg3200.txt +enlightening pg3200.txt +enlist pg3200.txt +enlisted pg3200.txt +enlivened. pg31100.txt +enlivening. pg3200.txt +enmity pg31100.txt, pg100.txt +enmity! pg100.txt +enmity. pg100.txt +enmity; pg100.txt +enmity? 
pg100.txt +ennobled pg3200.txt +ennobled!" pg3200.txt +ennobled, pg100.txt +ennobles pg3200.txt +ennui, pg31100.txt +enobarb pg100.txt +enobarbus pg100.txt +enobarbus! pg100.txt +enobarbus, pg100.txt +enobarbus. pg100.txt +enobarbus? pg100.txt +enormous pg31100.txt, pg3200.txt +enormous. pg3200.txt +enormously pg3200.txt +enough pg31100.txt, pg3200.txt, pg100.txt +enough! pg31100.txt, pg3200.txt, pg100.txt +enough!" pg3200.txt, pg100.txt +enough) pg3200.txt +enough, pg31100.txt, pg3200.txt, pg100.txt +enough," pg3200.txt +enough- pg100.txt +enough-- pg3200.txt +enough--but pg3200.txt +enough--i pg31100.txt +enough--put pg3200.txt +enough--they pg3200.txt +enough--turn pg3200.txt +enough. pg31100.txt, pg3200.txt, pg100.txt +enough." pg31100.txt, pg3200.txt +enough.--my pg31100.txt +enough: pg31100.txt, pg3200.txt, pg100.txt +enough; pg31100.txt, pg3200.txt, pg100.txt +enough;" pg31100.txt +enough;' pg3200.txt +enough? pg3200.txt, pg100.txt +enough?" pg3200.txt +enough?--there pg31100.txt +enow pg100.txt +enow, pg3200.txt +enow. pg100.txt +enow." pg3200.txt +enquire pg31100.txt +enquire. pg100.txt +enquired pg31100.txt, pg100.txt +enquired, pg100.txt +enquirer, pg3200.txt +enquirer: pg3200.txt +enquiries pg31100.txt +enquiries, pg31100.txt +enquiries. pg31100.txt +enquiring pg31100.txt +enquiry pg31100.txt +enquiry, pg31100.txt +enrag'd, pg100.txt +enrage pg3200.txt, pg100.txt +enraged pg31100.txt, pg3200.txt +enrapt pg100.txt +enrich pg3200.txt +enrich'd pg100.txt +enrich'd, pg100.txt +enriched pg3200.txt, pg100.txt +enriched, pg100.txt +enriches pg100.txt +enriching pg3200.txt +enrichment pg3200.txt +enrob'd, pg100.txt +enroll'd pg100.txt +enrolled pg3200.txt +ensample. pg3200.txt +enscombe pg31100.txt +enscombe, pg31100.txt +enscombe. pg31100.txt +enscombe." pg31100.txt +ensconce pg100.txt +enseigne? pg100.txt +ensemble: pg100.txt +enshrine pg3200.txt +ensign pg100.txt +ensign. pg31100.txt +enslaver pg3200.txt +enslaving pg3200.txt +enslaving. pg3200.txt +ensman. pg100.txt +ensue pg100.txt +ensue, pg31100.txt, pg3200.txt, pg100.txt +ensue. pg3200.txt, pg100.txt +ensue: pg100.txt +ensue; pg100.txt +ensued pg31100.txt, pg3200.txt +ensued, pg3200.txt +ensued. pg3200.txt +ensued: pg3200.txt +ensued? pg100.txt +ensues pg100.txt +ensues. pg100.txt +ensuing. pg31100.txt, pg100.txt +ensure pg31100.txt +ensured pg31100.txt +ent'red pg100.txt +ent'red, pg100.txt +ent'red. pg100.txt +entail pg100.txt +entail. pg31100.txt +entailed, pg31100.txt +entailed." pg31100.txt +entangles pg100.txt +entendre). pg3200.txt +entendu). pg3200.txt +entente, pg3200.txt +enter pg31100.txt, pg3200.txt, pg100.txt +enter'd pg100.txt +enter, pg100.txt +enter. pg3200.txt, pg100.txt +enter; pg3200.txt +enter] pg100.txt +entered pg31100.txt, pg3200.txt, pg100.txt +entered, pg31100.txt, pg3200.txt +entered,--"the pg3200.txt +entered. pg3200.txt +entering pg31100.txt, pg3200.txt, pg100.txt +entering, pg3200.txt +enterprise pg3200.txt, pg100.txt +enterprise! pg100.txt +enterprise, pg3200.txt, pg100.txt +enterprise," pg3200.txt +enterprise--and pg3200.txt +enterprise--none pg3200.txt +enterprise--now, pg3200.txt +enterprise. pg3200.txt, pg100.txt +enterprise: pg3200.txt +enterprise; pg3200.txt, pg100.txt +enterprise? pg3200.txt +enterprises pg100.txt +enterprises, pg3200.txt +enterprises. pg3200.txt, pg100.txt +enterprises; pg3200.txt +enterprising pg3200.txt +enterprising, pg3200.txt +enters pg3200.txt, pg100.txt +enters. pg3200.txt, pg100.txt +entertain pg31100.txt, pg3200.txt, pg100.txt +entertain'd pg100.txt +entertain'd. 
pg100.txt +entertain, pg100.txt +entertain. pg100.txt +entertained pg31100.txt, pg3200.txt +entertained, pg31100.txt +entertained. pg31100.txt, pg3200.txt +entertained." pg3200.txt +entertainer, pg3200.txt +entertainer- pg100.txt +entertaining pg31100.txt, pg3200.txt +entertaining, pg31100.txt, pg3200.txt +entertaining. pg3200.txt +entertaining." pg31100.txt +entertainingly; pg31100.txt +entertainment pg31100.txt, pg3200.txt, pg100.txt +entertainment, pg100.txt +entertainment. pg31100.txt, pg3200.txt, pg100.txt +entertainments. pg3200.txt +entertains pg3200.txt +entertains. pg3200.txt +enthroned, pg100.txt +enthusiasm pg31100.txt, pg3200.txt +enthusiasm, pg3200.txt +enthusiasm. pg31100.txt, pg3200.txt +enthusiasm.] pg3200.txt +enthusiasm: pg3200.txt +enthusiasms pg3200.txt +enthusiast pg3200.txt +enthusiast, pg3200.txt +enthusiastic pg3200.txt +enthusiastic--telling pg3200.txt +enthusiastic. pg3200.txt +enthusiastically pg3200.txt +enthusiastically, pg3200.txt +enthusiasts pg31100.txt, pg3200.txt +enthusing pg3200.txt +enticing pg3200.txt +entire pg31100.txt, pg3200.txt, pg100.txt +entire, pg31100.txt +entire: pg3200.txt +entirely pg31100.txt, pg3200.txt +entirely, pg31100.txt, pg3200.txt +entirely--not pg31100.txt +entirely--oh, pg3200.txt +entirely. pg31100.txt, pg3200.txt, pg100.txt +entirely." pg31100.txt +entirely: pg3200.txt +entirely? pg100.txt +entirety, pg3200.txt +entitled pg31100.txt, pg3200.txt +entitled, pg3200.txt +entitled. pg3200.txt +entitles pg3200.txt +entity pg31100.txt, pg3200.txt, pg100.txt +entity. pg3200.txt +entomb, pg100.txt +entrails pg100.txt +entrails, pg3200.txt +entrails? pg100.txt +entrance pg31100.txt, pg3200.txt +entrance, pg31100.txt, pg3200.txt +entrance-door pg3200.txt +entrance-passage pg31100.txt +entrance. pg31100.txt, pg3200.txt, pg100.txt +entrance; pg100.txt +entrance? pg3200.txt +entrances pg3200.txt +entrances; pg100.txt +entrancing pg3200.txt +entrap pg3200.txt, pg100.txt +entreat pg31100.txt, pg3200.txt, pg100.txt +entreat, pg100.txt +entreat. pg100.txt +entreat." pg31100.txt +entreat; pg100.txt +entreated pg31100.txt, pg100.txt +entreated, pg100.txt +entreated. pg100.txt +entreaties pg31100.txt, pg3200.txt, pg100.txt +entreaties, pg31100.txt, pg3200.txt, pg100.txt +entreaties. pg31100.txt +entreaties; pg100.txt +entreating pg31100.txt +entreats pg100.txt +entreats, pg100.txt +entreats. pg100.txt +entreaty pg31100.txt +entreaty, pg31100.txt +entreaty. pg31100.txt, pg100.txt +entreaty." pg3200.txt +entree pg31100.txt +entries pg3200.txt +entries. pg3200.txt +entrust pg3200.txt +entrusted pg31100.txt, pg3200.txt +entry pg3200.txt +entry, pg3200.txt +entry: pg3200.txt +entwine, pg3200.txt +enuf! pg3200.txt +enumerate pg3200.txt +enumerate. pg31100.txt +enumerated pg3200.txt +enumerating pg3200.txt +enunciation pg3200.txt +envelop pg3200.txt +envelope, pg3200.txt +enveloped pg3200.txt +envelopes. pg3200.txt +envenomed pg100.txt +enviable pg3200.txt +envied pg31100.txt, pg3200.txt, pg100.txt +envied, pg3200.txt +envied. pg3200.txt +envious pg3200.txt +envious, pg3200.txt, pg100.txt +envious. pg3200.txt, pg100.txt +envious? pg100.txt +environing pg3200.txt +environment pg3200.txt +environment. pg3200.txt +environs pg31100.txt, pg3200.txt +environs; pg3200.txt +envoy pg3200.txt +envoys pg3200.txt +envy pg31100.txt, pg3200.txt, pg100.txt +envy! pg100.txt +envy, pg3200.txt +envy--but pg3200.txt +envy. pg3200.txt, pg100.txt +envy." pg31100.txt +envy; pg100.txt +envy? 
pg100.txt +envying pg31100.txt +eoconomical pg31100.txt +eoconomy pg31100.txt +epaulets, pg3200.txt +eph." pg3200.txt +ephesus pg3200.txt, pg100.txt +ephesus! pg3200.txt +ephesus, pg3200.txt, pg100.txt +ephesus," pg3200.txt +ephesus. pg3200.txt, pg100.txt +ephesus; pg100.txt +ephraim pg3200.txt +epicure. pg100.txt +epicures! pg100.txt +epicurism. pg31100.txt +epidamnum pg100.txt +epidamnum, pg100.txt +epidamnum. pg100.txt +epidemic pg3200.txt +epidemic, pg3200.txt +epidemic. pg3200.txt +epidemic?' pg3200.txt +epigrammatic. pg3200.txt +epilepsy. pg100.txt +epileptic: pg3200.txt +epilogue pg31100.txt, pg100.txt +epilogue, pg100.txt +epilogue. pg100.txt +epilogue; pg100.txt +epilogues. pg100.txt +episode pg3200.txt +episode. pg3200.txt +episode: pg3200.txt +episodes pg3200.txt +episodes. pg3200.txt +epistles; pg3200.txt +epitaph pg100.txt +epitaph! pg100.txt +epitaph, pg100.txt +epitaph. pg100.txt +epitaph; pg100.txt +epitaphs; pg100.txt +epithet pg3200.txt +epithet. pg31100.txt, pg100.txt +epithet; pg3200.txt, pg100.txt +epitheton pg100.txt +epithets pg3200.txt +epithets, pg31100.txt +epithets. pg3200.txt +epitome pg3200.txt +epoch pg3200.txt +epoch. pg31100.txt +epochs pg3200.txt +eppenschlag, pg3200.txt +epsom. pg31100.txt +equable, pg3200.txt +equable. pg3200.txt +equal pg31100.txt, pg3200.txt, pg100.txt +equal, pg31100.txt, pg3200.txt +equal. pg31100.txt, pg3200.txt, pg100.txt +equal." pg31100.txt, pg3200.txt +equal.' pg3200.txt +equal; pg31100.txt, pg3200.txt +equal? pg3200.txt +equaled pg3200.txt +equality pg31100.txt, pg3200.txt, pg100.txt +equality." pg3200.txt +equality: pg3200.txt +equall'd. pg100.txt +equalled pg31100.txt, pg3200.txt +equalled, pg31100.txt +equalled; pg3200.txt +equalling pg3200.txt +equally pg31100.txt, pg3200.txt +equally, pg3200.txt +equally. pg100.txt +equals pg3200.txt +equator pg3200.txt +equator, pg3200.txt +equator," pg3200.txt +equator. pg3200.txt +equator.' pg3200.txt +equator.] pg3200.txt +equator; pg3200.txt +equerry pg3200.txt +equi--" pg3200.txt +equidistant pg3200.txt +equilibree. pg3200.txt +equinox!" pg3200.txt +equinox, pg31100.txt, pg3200.txt, pg100.txt +equinox." pg3200.txt +equipage pg31100.txt, pg3200.txt +equipage, pg31100.txt +equipage. pg100.txt +equipage: pg100.txt +equipment pg3200.txt +equipment. pg31100.txt, pg3200.txt, pg100.txt +equipment? pg3200.txt +equipped pg3200.txt +equipped, pg31100.txt +equipped. pg3200.txt +equipping, pg3200.txt +equity, pg3200.txt, pg100.txt +equity. pg31100.txt +equivalent pg31100.txt, pg3200.txt +equivalent"--meaning pg3200.txt +equivalent. pg3200.txt +equivalents pg3200.txt +equivocal. pg100.txt +equivocation, pg31100.txt +er--" pg3200.txt +er--lady pg3200.txt +era pg3200.txt +era, pg3200.txt +era. pg3200.txt +eradicated pg3200.txt +erard, pg3200.txt +erckmann-chatrian, pg3200.txt +erebus. pg100.txt +erect pg3200.txt, pg100.txt +erect, pg3200.txt +erected pg31100.txt, pg3200.txt, pg100.txt +erected. pg3200.txt +erecting pg100.txt +erection. pg100.txt +erection; pg100.txt +erects pg100.txt +erewhile, pg100.txt +erewhile. pg100.txt +erewhile? pg100.txt +erganzungsrevisionsfund pg3200.txt +ergo, pg3200.txt +erhalte, pg3200.txt +erickson. pg3200.txt +erie pg3200.txt +ermengare, pg100.txt +ermine, pg3200.txt +eros pg100.txt +eros! pg100.txt +eros, pg100.txt +eros. pg100.txt +erosions pg3200.txt +erotica. pg3200.txt +erpingham pg100.txt +erpingham. pg100.txt +erpingham: pg100.txt +err pg31100.txt, pg3200.txt, pg100.txt +err'd. pg100.txt +err, pg100.txt +err- pg100.txt +err--he pg3200.txt +err. 
pg3200.txt, pg100.txt +err?" pg3200.txt +errand pg31100.txt, pg3200.txt, pg100.txt +errand!" pg31100.txt +errand, pg31100.txt, pg3200.txt +errand--" pg3200.txt +errand. pg31100.txt, pg3200.txt, pg100.txt +errand." pg31100.txt +errand? pg3200.txt +errand?" pg3200.txt +errands pg31100.txt, pg3200.txt +errands, pg3200.txt +errands. pg3200.txt +errant pg3200.txt +erratic pg31100.txt +erred, pg3200.txt, pg100.txt +erring. pg3200.txt +erro-nort pg3200.txt +erronorts pg3200.txt +error pg31100.txt, pg3200.txt, pg100.txt +error!" pg31100.txt +error, pg31100.txt, pg3200.txt, pg100.txt +error. pg3200.txt, pg100.txt +error." pg31100.txt, pg3200.txt +error; pg3200.txt +error? pg100.txt +errors pg31100.txt, pg3200.txt, pg100.txt +errors, pg31100.txt, pg3200.txt +errors. pg3200.txt +errs!" pg3200.txt +errs? pg100.txt +erse.'" pg3200.txt +erst pg3200.txt +erst, pg100.txt +erudite pg3200.txt +erudition pg3200.txt +erudition--an pg3200.txt +erudition; pg100.txt +erzahlen. pg3200.txt +esau pg3200.txt +escalus pg100.txt +escalus! pg100.txt +escalus, pg100.txt +escalus. pg100.txt +escap'd pg100.txt +escap'd. pg100.txt +escap'dst. pg100.txt +escape pg31100.txt, pg3200.txt, pg100.txt +escape!" pg3200.txt +escape) pg3200.txt +escape, pg3200.txt, pg100.txt +escape--following pg3200.txt +escape-valve, pg3200.txt +escape. pg3200.txt, pg100.txt +escape." pg3200.txt +escape; pg100.txt +escape? pg100.txt +escape?" pg3200.txt +escape?' pg3200.txt +escaped pg31100.txt, pg3200.txt +escaped!" pg3200.txt +escaped, pg3200.txt +escaped. pg31100.txt, pg3200.txt +escaped." pg3200.txt +escapes pg31100.txt, pg3200.txt, pg100.txt +escapes. pg100.txt +escaping pg31100.txt, pg3200.txt +eschol pg3200.txt +escort pg3200.txt +escort, pg3200.txt +escort. pg3200.txt +escorted pg3200.txt +escritoire pg3200.txt +escutcheon, pg3200.txt +esdraelon, pg3200.txt +esdraelon. pg3200.txt +esel! pg3200.txt +esmeralda pg3200.txt +esmeralda, pg3200.txt +esmeralda. pg3200.txt +espartero, pg3200.txt +especial pg3200.txt +especially pg31100.txt, pg3200.txt, pg100.txt +especially, pg31100.txt, pg100.txt +especially. pg3200.txt +especially." pg31100.txt +especially; pg3200.txt +esperance! pg100.txt +espials) pg100.txt +espied pg3200.txt +espied. pg100.txt +espies pg100.txt +espous'd; pg100.txt +espouse pg100.txt +espouse. pg100.txt +espoused pg31100.txt, pg3200.txt +espy pg100.txt +espy, pg100.txt +esq. pg3200.txt +esq: pg3200.txt +esqre. pg31100.txt +esquire. pg100.txt +esquire; pg100.txt +esquires; pg3200.txt, pg100.txt +essay pg3200.txt +essay, pg31100.txt +essay. pg3200.txt +essays pg3200.txt +essays, pg3200.txt +essays. pg3200.txt +essays.' pg3200.txt +essence pg3200.txt +essential pg31100.txt, pg3200.txt +essential, pg3200.txt +essential. pg3200.txt +essentials pg3200.txt +essentials, pg31100.txt +essentials. pg3200.txt +essentials." pg31100.txt +esses. pg100.txt +essex pg100.txt +essex, pg3200.txt +est pg3200.txt, pg100.txt +est. pg3200.txt +establish pg31100.txt, pg3200.txt +establish'd pg100.txt +establish'd; pg100.txt +established pg31100.txt, pg3200.txt +established, pg31100.txt, pg3200.txt +established--that pg3200.txt +established. pg3200.txt +established." pg31100.txt, pg3200.txt +established; pg100.txt +establishes pg3200.txt +establishing pg3200.txt +establishment pg31100.txt, pg3200.txt +establishment, pg3200.txt +establishment,' pg3200.txt +establishment. pg31100.txt, pg3200.txt +establishment?--if pg31100.txt +establishments, pg3200.txt +estate pg31100.txt, pg3200.txt, pg100.txt +estate!" 
pg3200.txt +estate, pg31100.txt, pg3200.txt, pg100.txt +estate--"and pg3200.txt +estate--married pg3200.txt +estate--she pg3200.txt +estate. pg31100.txt, pg3200.txt, pg100.txt +estate." pg31100.txt +estate; pg3200.txt +estate? pg100.txt +estates pg31100.txt, pg3200.txt, pg100.txt +estates- pg100.txt +estates. pg3200.txt, pg100.txt +estates." pg31100.txt +esteem pg31100.txt, pg3200.txt, pg100.txt +esteem'd, pg100.txt +esteem'd. pg100.txt +esteem, pg31100.txt, pg3200.txt, pg100.txt +esteem. pg31100.txt, pg3200.txt, pg100.txt +esteemed pg31100.txt, pg3200.txt, pg100.txt +esteemed, pg3200.txt, pg100.txt +esteemed. pg100.txt +esteemed: pg100.txt +esteeming, pg100.txt +esteems pg31100.txt +esthetic pg3200.txt +estimable, pg31100.txt +estimate pg31100.txt, pg3200.txt +estimate, pg31100.txt, pg3200.txt, pg100.txt +estimate--we pg3200.txt +estimate. pg3200.txt, pg100.txt +estimate: pg100.txt +estimated pg3200.txt +estimated. pg3200.txt +estimates pg3200.txt +estimating pg3200.txt +estimation pg31100.txt, pg100.txt +estimation! pg100.txt +estimation, pg31100.txt, pg3200.txt, pg100.txt +estimation. pg100.txt +estimation.' pg100.txt +estranged? pg100.txt +estrangement. pg31100.txt +etait pg3200.txt +etc. pg31100.txt, pg3200.txt, pg100.txt +etc." pg31100.txt, pg3200.txt +etc.' pg3200.txt +etc.), pg3200.txt +etc., pg31100.txt, pg3200.txt, pg100.txt +etc.--5. pg3200.txt +etc..... pg3200.txt +etc.; pg31100.txt, pg3200.txt +etc.;--and pg3200.txt +etchings pg3200.txt +eternal pg3200.txt +eternal, pg3200.txt +eternal," pg3200.txt +eternal. pg31100.txt, pg3200.txt, pg100.txt +eternal." pg3200.txt +eternal; pg3200.txt +eternally pg3200.txt +eterne, pg100.txt +eterne. pg100.txt +eternities pg3200.txt +eternities!" pg3200.txt +eternities. pg3200.txt +eternity pg3200.txt +eternity! pg3200.txt +eternity, pg3200.txt, pg100.txt +eternity. pg3200.txt, pg100.txt +eternity." pg3200.txt +etext pg100.txt +etext, pg100.txt +ethelton pg3200.txt +ethelton!" pg3200.txt +ethereal pg3200.txt +etherealizes pg3200.txt +ethics pg3200.txt +ethiope! pg100.txt +ethiope. pg100.txt +etienne pg3200.txt +etienne." pg3200.txt +etiquette pg3200.txt +etiquette--something pg3200.txt +etiquette. pg3200.txt +etna, pg100.txt +eton pg100.txt +etretat, pg3200.txt +etwa? pg3200.txt +etwas pg3200.txt +euchre pg3200.txt +euchre-parties pg3200.txt +euchre. pg3200.txt +euclid pg3200.txt +euclid. pg3200.txt +eulogy pg3200.txt +eunuch pg100.txt +eunuch. pg100.txt +euphemism pg3200.txt +euphrates, pg100.txt +euphronius pg100.txt +euphuists pg3200.txt +euriphile, pg100.txt +europe pg3200.txt +europe! pg3200.txt, pg100.txt +europe, pg3200.txt +europe--" pg3200.txt +europe--comfort. pg3200.txt +europe--is pg3200.txt +europe. pg3200.txt, pg100.txt +europe.' pg100.txt +europe: pg3200.txt +europe; pg3200.txt, pg100.txt +europe? pg3200.txt +europe?" pg3200.txt +european pg3200.txt +european. pg3200.txt +europeans pg3200.txt +europeans. pg3200.txt +ev'n! pg100.txt +ev'rything pg100.txt +ev--" pg3200.txt +evacuated pg3200.txt +evacuated. pg3200.txt +evade pg31100.txt +evade, pg31100.txt +evaded pg3200.txt +evangelists, pg3200.txt +evangelization. pg3200.txt +evans pg100.txt +evans, pg100.txt +evaporation pg3200.txt +evasion pg3200.txt, pg100.txt +evasion! pg100.txt +evasions pg3200.txt +evasions, pg3200.txt +eve. pg3200.txt, pg100.txt +eve." pg3200.txt +eveeloj.' pg3200.txt +evelyn, pg31100.txt +even pg31100.txt, pg3200.txt, pg100.txt +even" pg3200.txt +even, pg3200.txt, pg100.txt +even--" pg31100.txt +even-handed pg3200.txt +even-pleach'd, pg100.txt +even. 
pg3200.txt, pg100.txt +even." pg3200.txt +even? pg100.txt +even?" pg31100.txt +evenin'. pg3200.txt +evening pg31100.txt, pg3200.txt +evening! pg31100.txt, pg100.txt +evening!--well, pg31100.txt +evening', pg3200.txt +evening's pg31100.txt +evening, pg31100.txt, pg3200.txt, pg100.txt +evening--the pg31100.txt +evening--where pg3200.txt +evening-parties pg31100.txt +evening. pg31100.txt, pg3200.txt, pg100.txt +evening." pg31100.txt, pg3200.txt +evening; pg31100.txt, pg3200.txt +evening? pg3200.txt, pg100.txt +evening?" pg31100.txt, pg3200.txt +evenings pg31100.txt, pg3200.txt +evenings. pg3200.txt +evenly pg3200.txt +evenly. pg100.txt +evenness pg31100.txt +evensong." pg3200.txt +event pg31100.txt, pg3200.txt, pg100.txt +event, pg3200.txt, pg100.txt +event,- pg100.txt +event-- pg31100.txt +event. pg31100.txt, pg3200.txt, pg100.txt +event." pg31100.txt +event: pg3200.txt +event; pg3200.txt +eventful pg3200.txt +eventide pg3200.txt +events pg31100.txt, pg3200.txt, pg100.txt +events! pg100.txt +events, pg31100.txt, pg3200.txt, pg100.txt +events--on pg3200.txt +events. pg31100.txt, pg3200.txt, pg100.txt +events: pg31100.txt +events; pg3200.txt +eventually pg31100.txt, pg3200.txt +eventuate pg3200.txt +ever! pg100.txt +ever!" pg3200.txt +ever, pg31100.txt, pg3200.txt, pg100.txt +ever-- pg3200.txt +ever--" pg3200.txt +ever--' pg3200.txt +ever--good pg3200.txt +ever--it's pg3200.txt +ever-esteemed pg100.txt +ever-glorious pg3200.txt +ever-increasing pg3200.txt +ever-lengthening pg3200.txt +ever-watchful pg3200.txt +ever. pg31100.txt, pg3200.txt, pg100.txt +ever." pg31100.txt, pg3200.txt +ever.' pg100.txt +ever; pg31100.txt, pg3200.txt, pg100.txt +everest pg3200.txt +everest. pg3200.txt +evergreen! pg31100.txt +everingham, pg31100.txt +everingham." pg31100.txt +everlasting pg31100.txt, pg3200.txt, pg100.txt +everlasting!" pg3200.txt +everlastingly pg3200.txt, pg100.txt +everlastingly. pg100.txt +evermore pg100.txt +evermore! pg3200.txt, pg100.txt +evermore. pg100.txt +every-which-way, pg3200.txt +everybody pg31100.txt, pg3200.txt +everybody's pg31100.txt, pg3200.txt +everybody's, pg3200.txt +everybody's-even pg3200.txt +everybody's; pg3200.txt +everybody, pg31100.txt, pg3200.txt +everybody--"goodness!" pg3200.txt +everybody--"mercy!" pg3200.txt +everybody--"what!" pg3200.txt +everybody--even pg3200.txt +everybody--including pg3200.txt +everybody--lived pg3200.txt +everybody. pg31100.txt, pg3200.txt +everybody." pg31100.txt, pg3200.txt +everybody: pg3200.txt +everybody; pg3200.txt +everybody?" pg31100.txt +everyday pg31100.txt +everyone's pg31100.txt +everyone. pg31100.txt, pg100.txt +everything pg31100.txt, pg3200.txt, pg100.txt +everything! pg3200.txt, pg100.txt +everything!" pg3200.txt +everything, pg31100.txt, pg3200.txt +everything- pg100.txt +everything--anything pg3200.txt +everything--even pg3200.txt +everything--in pg3200.txt +everything-in-which, pg3200.txt +everything. pg31100.txt, pg3200.txt, pg100.txt +everything." pg31100.txt, pg3200.txt +everything.' pg3200.txt +everything: pg3200.txt +everything; pg31100.txt, pg3200.txt +everywhah, pg3200.txt +everywhere pg31100.txt, pg3200.txt +everywhere! pg3200.txt, pg100.txt +everywhere!" pg3200.txt +everywhere, pg3200.txt +everywhere--" pg3200.txt +everywhere--and pg3200.txt +everywhere. pg3200.txt, pg100.txt +everywhere." pg31100.txt, pg3200.txt +everywhere: pg3200.txt +everywhere; pg3200.txt, pg100.txt +everywheres pg3200.txt +everywheres. pg3200.txt +everywheres.' pg3200.txt +everywheres?" pg3200.txt +evidence pg3200.txt, pg100.txt +evidence!" 
pg3200.txt +evidence, pg3200.txt, pg100.txt +evidence,--" pg3200.txt +evidence. pg3200.txt, pg100.txt +evidence." pg3200.txt +evidence; pg3200.txt +evidence?" pg3200.txt +evidences pg3200.txt +evidences, pg3200.txt +evidences- pg100.txt +evidences. pg3200.txt +evident pg31100.txt, pg3200.txt, pg100.txt +evident, pg31100.txt, pg3200.txt, pg100.txt +evident. pg31100.txt +evident: pg100.txt +evidently pg31100.txt, pg3200.txt +evidently, pg31100.txt, pg3200.txt +evidently. pg3200.txt +evil pg31100.txt, pg3200.txt, pg100.txt +evil! pg31100.txt +evil, pg31100.txt, pg3200.txt, pg100.txt +evil--he pg3200.txt +evil-minded pg31100.txt +evil. pg31100.txt, pg3200.txt, pg100.txt +evil." pg31100.txt, pg3200.txt +evil: pg100.txt +evil; pg31100.txt, pg100.txt +evil? pg3200.txt, pg100.txt +evils pg31100.txt, pg3200.txt, pg100.txt +evils, pg100.txt +evils- pg100.txt +evils. pg3200.txt, pg100.txt +evils; pg100.txt +evince pg3200.txt +evinced pg3200.txt +evitant pg3200.txt +evoked pg3200.txt +evoked--these pg3200.txt +evolution pg3200.txt +evolved pg3200.txt +evyn pg3200.txt +ewes pg100.txt +ewes, pg100.txt +ex--" pg3200.txt +ex----" pg3200.txt +ex-convicts. pg3200.txt +ex-thief pg3200.txt +exact pg31100.txt, pg3200.txt +exact, pg3200.txt, pg100.txt +exact--like pg3200.txt +exact. pg3200.txt +exact." pg3200.txt +exacted pg3200.txt +exactest pg3200.txt +exacting pg31100.txt, pg3200.txt +exacting, pg100.txt +exacting. pg3200.txt +exaction! pg100.txt +exaction? pg100.txt +exactions, pg100.txt +exactitude, pg3200.txt +exactly pg31100.txt, pg3200.txt, pg100.txt +exactly, pg3200.txt +exactly. pg3200.txt +exactly." pg31100.txt +exactly; pg3200.txt +exactness pg3200.txt +exactness. pg31100.txt, pg3200.txt +exactness; pg3200.txt +exaggerate pg3200.txt +exaggerated pg3200.txt +exaggeration pg3200.txt +exaggeration." pg3200.txt +exaggerations pg3200.txt +exaggerations--and pg3200.txt +exalt pg3200.txt +exaltation. pg3200.txt +exaltation; pg3200.txt +exalted pg31100.txt, pg3200.txt +exalted, pg3200.txt +exalting pg3200.txt +examin'd. pg100.txt +examination pg31100.txt, pg3200.txt, pg100.txt +examination--the pg3200.txt +examination. pg31100.txt, pg3200.txt +examination: pg3200.txt +examination; pg3200.txt +examination? pg100.txt +examination?" pg3200.txt +examinations pg3200.txt +examine pg31100.txt, pg3200.txt, pg100.txt +examine, pg31100.txt, pg3200.txt +examine. pg3200.txt, pg100.txt +examined pg3200.txt, pg100.txt +examined, pg31100.txt, pg3200.txt +examined. pg3200.txt +examined." pg3200.txt +examined? pg100.txt +examines pg3200.txt, pg100.txt +examining pg31100.txt, pg3200.txt +examining, pg3200.txt +examining. pg3200.txt +example pg31100.txt, pg3200.txt, pg100.txt +example, pg31100.txt, pg3200.txt, pg100.txt +example. pg3200.txt, pg100.txt +example." pg3200.txt +example: pg3200.txt +example; pg3200.txt +examples pg31100.txt, pg100.txt +examples, pg3200.txt +examples. pg3200.txt, pg100.txt +examples: pg3200.txt +exasperate pg31100.txt +exasperated; pg3200.txt +exasperating pg3200.txt +exasperating. pg3200.txt +exasperation pg3200.txt +exasperation. pg3200.txt +excavate, pg3200.txt +excavated pg3200.txt +excavations pg3200.txt +exceed pg3200.txt, pg100.txt +exceed, pg100.txt +exceed. pg100.txt +exceeded pg31100.txt, pg3200.txt +exceeded, pg3200.txt +exceeding pg31100.txt, pg3200.txt, pg100.txt +exceedingly pg31100.txt, pg3200.txt +exceedingly. pg31100.txt, pg3200.txt +exceedingly." pg31100.txt +exceeds pg100.txt +exceeds? pg100.txt +excel pg31100.txt, pg100.txt +excel. 
pg100.txt +excel: pg100.txt +excellence pg31100.txt, pg3200.txt, pg100.txt +excellence!' pg100.txt +excellence, pg31100.txt, pg100.txt +excellence.--[what pg3200.txt +excellence: pg100.txt +excellence; pg100.txt +excellence? pg100.txt +excellencies pg31100.txt +excellency pg31100.txt, pg3200.txt, pg100.txt +excellency, pg3200.txt +excellency. pg3200.txt +excellency." pg3200.txt +excellency?" pg3200.txt +excellent pg31100.txt, pg3200.txt, pg100.txt +excellent! pg100.txt +excellent, pg31100.txt, pg3200.txt, pg100.txt +excellent. pg3200.txt, pg100.txt +excellent; pg100.txt +excellently. pg100.txt +excelling; pg100.txt +excels. pg100.txt +except pg31100.txt, pg3200.txt, pg100.txt +except-- pg31100.txt +except. pg100.txt +excepted, pg3200.txt +excepted. pg100.txt +excepting pg31100.txt, pg3200.txt +exception pg3200.txt, pg100.txt +exception, pg31100.txt, pg3200.txt, pg100.txt +exception. pg3200.txt +exception." pg31100.txt +exceptional. pg3200.txt +exceptionally pg3200.txt +exceptions pg3200.txt +exceptions, pg31100.txt, pg3200.txt +exceptions." pg3200.txt +exceptions; pg3200.txt +excerpt: pg3200.txt +excess pg31100.txt, pg3200.txt, pg100.txt +excess! pg100.txt +excess, pg100.txt +excess. pg31100.txt, pg3200.txt, pg100.txt +excesses pg3200.txt +excessive pg3200.txt, pg100.txt +excessive. pg31100.txt +excessively pg31100.txt +excessively, pg31100.txt +excessively. pg31100.txt +exchang'd pg100.txt +exchange pg31100.txt, pg3200.txt, pg100.txt +exchange, pg100.txt +exchange. pg31100.txt, pg3200.txt, pg100.txt +exchange; pg100.txt +exchanged pg31100.txt, pg3200.txt +exchanged, pg3200.txt, pg100.txt +exchanged. pg3200.txt +exchanges pg3200.txt +exchanges, pg3200.txt +exchanging pg3200.txt +exchequer pg3200.txt +exchequer. pg100.txt +excite pg31100.txt, pg3200.txt +excite. pg31100.txt +excite; pg3200.txt +excited pg31100.txt, pg3200.txt +excited, pg31100.txt, pg3200.txt +excited. pg31100.txt, pg3200.txt +excited." pg3200.txt +excited? pg3200.txt +excitedly pg3200.txt +excitedly-- pg3200.txt +excitedly. pg3200.txt +excitement pg3200.txt +excitement! pg3200.txt +excitement!! pg3200.txt +excitement, pg3200.txt +excitement--because pg3200.txt +excitement. pg3200.txt +excitement.' pg3200.txt +excitement: pg3200.txt +excitement; pg3200.txt +excitement? pg3200.txt +excitements, pg3200.txt +exciting pg31100.txt, pg3200.txt +exciting. pg3200.txt +exciting." pg3200.txt +exciting; pg3200.txt +exclaim pg31100.txt, pg3200.txt, pg100.txt +exclaim, pg31100.txt, pg100.txt +exclaim. pg31100.txt, pg100.txt +exclaim: pg3200.txt +exclaim; pg31100.txt, pg3200.txt, pg100.txt +exclaimed pg31100.txt, pg3200.txt +exclaimed, pg31100.txt, pg3200.txt +exclaimed-- pg31100.txt, pg3200.txt +exclaimed. pg3200.txt +exclaimed: pg31100.txt, pg3200.txt +exclaiming pg31100.txt +exclaiming, pg31100.txt +exclaiming-- pg31100.txt, pg3200.txt +exclaiming: pg3200.txt +exclaims pg100.txt +exclaims, pg100.txt +exclaims. pg100.txt +exclaims: pg3200.txt +exclamation pg31100.txt, pg3200.txt +exclamation, pg3200.txt +exclamation-points pg3200.txt +exclamation. pg3200.txt, pg100.txt +exclamation: pg3200.txt +exclamation? pg100.txt +exclamations pg31100.txt, pg3200.txt +exclamations, pg31100.txt +exclamations. pg3200.txt, pg100.txt +exclamations; pg3200.txt +exclude pg3200.txt +excluded pg31100.txt, pg3200.txt +excluded. 
pg31100.txt +exclusions pg100.txt +exclusive pg3200.txt +exclusively pg31100.txt, pg3200.txt +excommunicate pg3200.txt +excommunicate; pg100.txt +excommunicated pg3200.txt +excommunicated--no pg3200.txt +excommunication pg3200.txt +excommunication. pg3200.txt +excommunication: pg3200.txt +excommunication; pg3200.txt +excrement pg100.txt +excrement. pg100.txt +excrement? pg100.txt +excrements, pg100.txt +excruciating pg3200.txt +excursion pg3200.txt, pg100.txt +excursion! pg3200.txt +excursion" pg3200.txt +excursion, pg3200.txt +excursion,--[see pg3200.txt +excursion--gold pg3200.txt +excursion. pg3200.txt +excursion." pg3200.txt +excursion.] pg3200.txt +excursion; pg3200.txt +excursioning pg3200.txt +excursionists pg3200.txt +excursionists, pg3200.txt +excursions pg31100.txt, pg3200.txt, pg100.txt +excursions--its pg3200.txt +excursions. pg3200.txt +excus'd pg100.txt +excus'd. pg100.txt +excusable pg31100.txt, pg3200.txt +excusable. pg31100.txt +excusably pg3200.txt +excuse pg31100.txt, pg3200.txt, pg100.txt +excuse' pg100.txt +excuse, pg100.txt +excuse. pg31100.txt, pg3200.txt, pg100.txt +excuse." pg31100.txt +excuse; pg3200.txt, pg100.txt +excuse? pg100.txt +excused pg31100.txt, pg3200.txt, pg100.txt +excused, pg3200.txt +excused. pg31100.txt, pg3200.txt, pg100.txt +excuses pg31100.txt, pg3200.txt, pg100.txt +excuses. pg100.txt +excuses." pg31100.txt +excuses; pg100.txt +excusing pg3200.txt +execrable pg3200.txt +execrated pg3200.txt +execrates pg3200.txt +execrations, pg100.txt +execrations." pg3200.txt +execute pg100.txt +execute), pg100.txt +execute, pg100.txt +execute- pg100.txt +execute. pg100.txt +executed pg31100.txt, pg3200.txt +executed, pg100.txt +executed. pg3200.txt, pg100.txt +executed." pg3200.txt +executing pg31100.txt +executing, pg100.txt +execution pg3200.txt, pg100.txt +execution! pg100.txt +execution, pg31100.txt, pg100.txt +execution. pg31100.txt, pg3200.txt, pg100.txt +execution." pg31100.txt +execution."] pg3200.txt +execution.' pg31100.txt +execution: pg100.txt +execution? pg100.txt +executioner pg3200.txt, pg100.txt +executioner, pg3200.txt, pg100.txt +executioner- pg100.txt +executioner. pg100.txt +executioner; pg100.txt +executioner? pg100.txt +executioners pg3200.txt, pg100.txt +executioners. pg100.txt +executive pg3200.txt +executive, pg3200.txt +executors pg3200.txt +exemplified. pg3200.txt +exemplify: pg31100.txt +exempt pg31100.txt, pg3200.txt, pg100.txt +exempt, pg100.txt +exercise pg31100.txt, pg3200.txt, pg100.txt +exercise, pg31100.txt, pg3200.txt +exercise. pg31100.txt, pg3200.txt, pg100.txt +exercise." pg31100.txt +exercise; pg31100.txt, pg100.txt +exercise? pg100.txt +exercised pg31100.txt, pg3200.txt +exercised, pg3200.txt +exercises pg3200.txt +exercises, pg3200.txt, pg100.txt +exercises." pg3200.txt +exercises; pg100.txt +exercising pg3200.txt +exert pg31100.txt +exerted pg31100.txt, pg3200.txt +exertion pg31100.txt, pg3200.txt +exertion, pg31100.txt +exertion. pg31100.txt, pg3200.txt +exertions pg31100.txt +exertions. pg3200.txt +exeter pg31100.txt, pg100.txt +exeter! pg100.txt +exeter, pg100.txt +exeter. pg31100.txt, pg100.txt +exeter;" pg31100.txt +exeter? pg100.txt +exeunt pg100.txt +exeunt. pg100.txt +exeunt] pg100.txt +exhalations? pg100.txt +exhale pg100.txt +exhaled pg3200.txt +exhales pg100.txt +exhalted pg3200.txt +exhaust pg3200.txt +exhausted pg31100.txt, pg3200.txt +exhausted, pg3200.txt +exhausted. 
pg3200.txt +exhausted; pg3200.txt +exhausting pg3200.txt +exhausting, pg3200.txt +exhaustion pg3200.txt +exhaustion, pg3200.txt +exhaustive, pg3200.txt +exhaustively pg3200.txt +exhaustively. pg3200.txt +exhaustlessly pg3200.txt +exhausts pg3200.txt +exhibit pg3200.txt +exhibit, pg3200.txt +exhibit. pg3200.txt +exhibit." pg31100.txt +exhibited pg3200.txt +exhibited, pg31100.txt, pg3200.txt +exhibited. pg3200.txt +exhibiting pg3200.txt +exhibition pg31100.txt, pg3200.txt, pg100.txt +exhibition, pg100.txt +exhibition-ground. pg3200.txt +exhibition. pg3200.txt +exhibition." pg31100.txt +exhibition; pg3200.txt +exhibitions pg3200.txt +exhibitions; pg3200.txt +exhibits pg3200.txt +exhilarate pg3200.txt +exhilarating, pg3200.txt +exhilaration pg3200.txt +exhilaration. pg3200.txt +exhumed pg3200.txt +exigence, pg31100.txt +exigent; pg100.txt +exigent? pg100.txt +exil'd. pg100.txt +exile pg31100.txt, pg3200.txt, pg100.txt +exile! pg3200.txt +exile, pg100.txt +exile. pg3200.txt +exile: pg100.txt +exile; pg3200.txt, pg100.txt +exile? pg3200.txt +exiled pg3200.txt +exiles pg3200.txt +exion pg100.txt +exist pg3200.txt +exist, pg3200.txt +exist. pg31100.txt, pg3200.txt +exist." pg31100.txt, pg3200.txt +exist; pg31100.txt, pg3200.txt +existed pg3200.txt +existed--in pg3200.txt +existed. pg31100.txt, pg3200.txt +existence pg31100.txt, pg3200.txt +existence, pg31100.txt, pg3200.txt +existence. pg31100.txt, pg3200.txt +existence." pg31100.txt, pg3200.txt +existence.' pg3200.txt +existence.] pg3200.txt +existence; pg3200.txt +existence? pg3200.txt +existent pg3200.txt +existent, pg3200.txt +existing pg3200.txt +exists pg31100.txt, pg3200.txt, pg100.txt +exists--over pg3200.txt +exists. pg3200.txt +exists? pg3200.txt +exit pg100.txt +exit, pg31100.txt +exit. pg3200.txt, pg100.txt +exit.] pg3200.txt +exit; pg31100.txt +exit] pg100.txt +exodus pg3200.txt +exonerated pg3200.txt +exorbitant; pg3200.txt +exorcism, pg3200.txt +exorcisms? pg100.txt +exorcist pg100.txt +expand pg3200.txt +expanded pg3200.txt +expanded, pg3200.txt +expanding pg3200.txt +expanse pg3200.txt +expanses pg3200.txt +expansion pg3200.txt +expansive pg3200.txt +expe'ence. pg3200.txt +expect pg31100.txt, pg3200.txt, pg100.txt +expect, pg31100.txt +expect. pg31100.txt, pg3200.txt, pg100.txt +expect..... pg3200.txt +expect; pg31100.txt +expect? pg3200.txt +expectancy pg3200.txt, pg100.txt +expectancy. pg3200.txt +expectant pg3200.txt +expectantly pg3200.txt +expectation pg31100.txt, pg3200.txt, pg100.txt +expectation, pg31100.txt, pg100.txt +expectation. pg31100.txt, pg100.txt +expectation." pg31100.txt +expectations pg31100.txt, pg3200.txt +expectations, pg31100.txt +expectations. pg31100.txt, pg3200.txt +expected pg31100.txt, pg3200.txt +expected!" pg3200.txt +expected, pg31100.txt, pg3200.txt +expected--but pg3200.txt +expected--often pg3200.txt +expected--you pg3200.txt +expected. pg31100.txt, pg3200.txt, pg100.txt +expected." pg31100.txt, pg3200.txt +expected.' pg3200.txt +expected.--very pg31100.txt +expected; pg31100.txt, pg3200.txt +expected?" pg31100.txt +expecting pg31100.txt, pg3200.txt, pg100.txt +expecting! pg31100.txt +expecting, pg31100.txt +expecting--" pg3200.txt +expecting--but pg3200.txt +expecting. pg3200.txt +expecting." pg31100.txt +expecto--" pg3200.txt +expects pg3200.txt, pg100.txt +expects. pg100.txt +expedience, pg100.txt +expedience. pg100.txt +expediencies pg3200.txt +expediency pg31100.txt, pg3200.txt +expediency--must pg3200.txt +expedient, pg31100.txt +expedient." 
pg3200.txt +expedients, pg3200.txt +expedite pg3200.txt +expedition pg31100.txt, pg3200.txt, pg100.txt +expedition! pg100.txt +expedition, pg3200.txt +expedition--" pg3200.txt +expedition--a pg3200.txt +expedition. pg3200.txt, pg100.txt +expedition." pg3200.txt +expedition; pg100.txt +expedition? pg100.txt +expeditions, pg3200.txt +expeditiously pg31100.txt +expel pg31100.txt, pg100.txt +expelled pg3200.txt +expelled. pg3200.txt +expelled." pg3200.txt +expels pg100.txt +expence--if pg3200.txt +expences, pg31100.txt +expenctancy. pg3200.txt +expended pg3200.txt +expenditures pg3200.txt +expense pg31100.txt, pg3200.txt, pg100.txt +expense, pg31100.txt, pg3200.txt, pg100.txt +expense--" pg3200.txt +expense. pg31100.txt, pg3200.txt, pg100.txt +expense." pg31100.txt +expense.' pg31100.txt +expense; pg3200.txt +expense? pg3200.txt +expenses pg31100.txt, pg3200.txt +expenses!" pg3200.txt +expenses, pg3200.txt +expenses. pg3200.txt +expenses." pg31100.txt +expenses; pg3200.txt +expensive pg31100.txt, pg3200.txt +expensive, pg31100.txt, pg3200.txt +expensive. pg3200.txt +expensively pg3200.txt +experience pg31100.txt, pg3200.txt +experience, pg31100.txt, pg3200.txt, pg100.txt +experience. pg3200.txt, pg100.txt +experience." pg3200.txt +experience.) pg3200.txt +experience; pg3200.txt, pg100.txt +experience? pg100.txt +experience?" pg3200.txt +experience?' pg3200.txt +experienced pg31100.txt, pg3200.txt +experienced, pg31100.txt +experienced; pg3200.txt +experiences pg3200.txt +experiences, pg3200.txt +experiences: pg3200.txt +experiment pg31100.txt, pg3200.txt +experiment, pg3200.txt +experiment. pg3200.txt, pg100.txt +experiment? pg3200.txt +experimental pg3200.txt +experimented pg3200.txt +experimenter pg3200.txt +experimenter--and pg3200.txt +experimenters pg3200.txt +experimenting, pg3200.txt +experiments pg3200.txt +experiments, pg3200.txt +experiments. pg3200.txt, pg100.txt +expert pg3200.txt +expert, pg3200.txt +expert. pg3200.txt +expert." pg3200.txt +expertness pg100.txt +experts pg3200.txt +experts--experts pg3200.txt +expiate. pg100.txt +expiation, pg100.txt +expinsive." pg3200.txt +expir'd, pg100.txt +expire pg3200.txt +expire, pg100.txt +expire. pg3200.txt +expired. pg100.txt +expired; pg3200.txt +expires; pg3200.txt +explain pg31100.txt, pg3200.txt +explain!" pg3200.txt +explain, pg3200.txt +explain--everybody pg3200.txt +explain. pg31100.txt, pg3200.txt +explain." pg31100.txt, pg3200.txt +explain? pg3200.txt +explain?" pg3200.txt +explain?' pg3200.txt +explained pg31100.txt, pg3200.txt +explained, pg3200.txt +explained. pg31100.txt, pg3200.txt +explained." pg3200.txt +explained: pg31100.txt, pg3200.txt +explained; pg3200.txt +explaining pg3200.txt +explaining. pg3200.txt +explains pg3200.txt +explanation pg31100.txt, pg3200.txt +explanation, pg31100.txt +explanation. pg31100.txt, pg3200.txt +explanation." pg31100.txt +explanation: pg3200.txt +explanation; pg3200.txt +explanations pg31100.txt, pg3200.txt +explanations, pg31100.txt, pg3200.txt +explanations. pg31100.txt +explanatory pg31100.txt, pg3200.txt +expletives pg3200.txt +expletives, pg3200.txt +expletives." pg3200.txt +explicit. pg31100.txt +explicit." pg31100.txt +exploded pg3200.txt +exploding pg3200.txt +exploit pg100.txt +exploit, pg3200.txt, pg100.txt +exploit--the pg3200.txt +exploit. pg3200.txt, pg100.txt +exploiting pg3200.txt +exploits pg31100.txt, pg3200.txt +exploits), pg3200.txt +exploits. pg3200.txt, pg100.txt +exploration pg3200.txt +explore pg3200.txt +explore. 
pg3200.txt +explored pg3200.txt +explorers pg3200.txt +explorers, pg3200.txt +explorers--cook? pg3200.txt +exploring pg31100.txt, pg3200.txt +explosion pg3200.txt +explosion, pg3200.txt +explosion. pg3200.txt +explosion; pg3200.txt +explosion?" pg3200.txt +explosions pg3200.txt +explosions" pg3200.txt +explosions, pg3200.txt +explosive pg3200.txt +explosive; pg3200.txt +exponents pg3200.txt +export pg3200.txt +export. pg3200.txt +exporter pg3200.txt +exports pg3200.txt +exports; pg3200.txt +expos'd pg100.txt +expos'd, pg100.txt +expose pg31100.txt, pg3200.txt, pg100.txt +exposed pg31100.txt, pg3200.txt +exposed, pg31100.txt +exposed. pg31100.txt +exposing pg31100.txt, pg3200.txt +exposition pg3200.txt, pg100.txt +exposition. pg3200.txt, pg100.txt +expositions. pg3200.txt +expositor, pg100.txt +expostulate pg31100.txt, pg100.txt +expostulate, pg100.txt +expostulate. pg100.txt +expostulation pg31100.txt +exposure pg3200.txt +exposure, pg3200.txt, pg100.txt +exposure--by pg3200.txt +exposure--think pg3200.txt +exposure. pg3200.txt +exposure." pg3200.txt +exposure: pg3200.txt +exposure?" pg3200.txt +exposures. pg3200.txt +expound pg100.txt +expounded pg100.txt +express pg31100.txt, pg3200.txt, pg100.txt +express'd pg100.txt +express, pg100.txt +express-train. pg3200.txt +express. pg31100.txt, pg3200.txt, pg100.txt +express." pg31100.txt +express.) pg3200.txt +express? pg3200.txt, pg100.txt +express?" pg3200.txt +expressed pg31100.txt, pg3200.txt +expressed, pg31100.txt, pg3200.txt, pg100.txt +expressed. pg31100.txt, pg3200.txt, pg100.txt +expressed." pg31100.txt +expressed; pg31100.txt, pg100.txt +expresses pg31100.txt, pg3200.txt +expressing pg31100.txt, pg3200.txt +expressing, pg100.txt +expression pg31100.txt, pg3200.txt +expression!" pg3200.txt +expression, pg31100.txt, pg3200.txt +expression--everything. pg3200.txt +expression. pg31100.txt, pg3200.txt +expression." pg31100.txt +expression.' pg3200.txt +expression; pg31100.txt, pg3200.txt +expressions pg31100.txt, pg3200.txt +expressions. pg3200.txt +expressive pg31100.txt, pg3200.txt +expressive. pg3200.txt +expressively pg31100.txt +expressively, pg31100.txt +expressly pg31100.txt, pg3200.txt +expressly. pg31100.txt +expressly? pg100.txt +expressman pg3200.txt +expropriated pg3200.txt +expulsion pg3200.txt, pg100.txt +expunged pg3200.txt +exquisite pg31100.txt, pg3200.txt, pg100.txt +exquisite!" pg3200.txt +exquisite. pg31100.txt, pg3200.txt +exquisite." pg31100.txt +exquisite; pg3200.txt +exquisitely pg31100.txt, pg3200.txt +exquisiteness pg3200.txt +extacies pg31100.txt +extant pg31100.txt, pg3200.txt +extant. pg3200.txt, pg100.txt +extant? pg100.txt +extemporaneous. pg3200.txt +extemporaneousness." pg3200.txt +extempore? pg100.txt +extend pg31100.txt, pg3200.txt, pg100.txt +extend, pg100.txt +extend. pg100.txt +extended pg31100.txt, pg3200.txt, pg100.txt +extended. pg31100.txt +extending pg31100.txt, pg3200.txt +extends pg3200.txt +extends, pg100.txt +extension pg3200.txt +extensions pg3200.txt +extensive pg31100.txt, pg3200.txt +extensive, pg3200.txt +extensive. pg3200.txt +extensive.' pg3200.txt +extensive; pg31100.txt +extensively. pg3200.txt +extensively; pg3200.txt +extent pg31100.txt, pg3200.txt, pg100.txt +extent, pg3200.txt +extent. pg31100.txt, pg3200.txt +extent."--the pg31100.txt +extent; pg3200.txt +extenuate, pg100.txt +extenuate- pg100.txt +extenuation pg31100.txt +extenuation. pg31100.txt +extenuations. pg31100.txt +exterior pg31100.txt, pg3200.txt +exteriorly, pg100.txt +exteriorly. 
pg3200.txt +exteriors pg31100.txt, pg100.txt +extermin'd. pg100.txt +exterminate pg3200.txt +exterminated pg3200.txt +exterminated. pg3200.txt +exterminating pg3200.txt +extermination pg3200.txt +extermination, pg3200.txt +extinct pg3200.txt +extinct. pg3200.txt +extinction pg3200.txt +extinction. pg3200.txt +extinctions pg3200.txt +extinguished pg3200.txt +extinguished, pg3200.txt +extinguished. pg3200.txt +extinguisher. pg3200.txt +extoll'd, pg100.txt +extolled pg3200.txt +exton pg100.txt +exton, pg100.txt +extort pg100.txt +extorted pg31100.txt +extorting pg31100.txt +extortion; pg100.txt +extortions. pg100.txt +extra pg3200.txt +extra. pg3200.txt +extract pg3200.txt +extract, pg3200.txt +extract. pg3200.txt +extract: pg3200.txt +extracted pg3200.txt, pg100.txt +extracted. pg3200.txt +extracting pg3200.txt +extracts pg3200.txt +extracts." pg31100.txt +extracts: pg3200.txt +extracts:-- pg3200.txt +extraordi--" pg3200.txt +extraordinarily. pg31100.txt, pg100.txt +extraordinary pg31100.txt, pg3200.txt +extraordinary! pg31100.txt +extraordinary!--and pg31100.txt +extraordinary, pg31100.txt, pg3200.txt, pg100.txt +extraordinary--" pg31100.txt +extraordinary. pg31100.txt, pg3200.txt +extraordinary; pg31100.txt +extras pg3200.txt +extras, pg3200.txt +extraught, pg100.txt +extravagance pg31100.txt, pg3200.txt +extravagance, pg31100.txt +extravagance. pg31100.txt, pg3200.txt +extravagance." pg3200.txt +extravagant pg31100.txt, pg3200.txt +extravagant, pg3200.txt +extravagant--it pg3200.txt +extravagant. pg3200.txt +extravagant." pg31100.txt +extravagant; pg31100.txt +extravagantly pg3200.txt +extreme pg31100.txt, pg3200.txt +extreme, pg31100.txt, pg100.txt +extreme. pg31100.txt +extreme.' pg3200.txt +extremely pg31100.txt, pg3200.txt +extremely, pg100.txt +extremely. pg31100.txt +extremes pg3200.txt, pg100.txt +extremes. pg3200.txt, pg100.txt +extremes? pg100.txt +extremities pg3200.txt, pg100.txt +extremities!' pg3200.txt +extremities. pg3200.txt +extremities; pg100.txt +extremity pg31100.txt, pg3200.txt, pg100.txt +extremity, pg3200.txt, pg100.txt +extremity. pg3200.txt, pg100.txt +extremity; pg100.txt +extricating pg3200.txt +exuberant pg3200.txt +exult pg31100.txt, pg3200.txt +exultant pg3200.txt +exultant. pg3200.txt +exultation pg31100.txt, pg3200.txt, pg100.txt +exultation, pg31100.txt +exultation-- pg31100.txt +exultation. pg3200.txt +exultation: pg3200.txt +exulted pg3200.txt +exulting pg31100.txt +exultingly. pg31100.txt +ey'd pg100.txt +ey'd. pg100.txt +eye pg31100.txt, pg3200.txt, pg100.txt +eye! pg31100.txt, pg3200.txt, pg100.txt +eye!" pg3200.txt +eye), pg3200.txt +eye, pg31100.txt, pg3200.txt, pg100.txt +eye,--"your pg31100.txt +eye- pg100.txt +eye-- pg3200.txt +eye--and pg3200.txt +eye-glance, pg3200.txt +eye-glass pg100.txt +eye-lids. pg3200.txt +eye-opener?" pg3200.txt +eye-water pg3200.txt +eye-water--and pg3200.txt +eye-water. pg3200.txt +eye-water; pg3200.txt +eye-witnesses pg3200.txt +eye-witnesses, pg3200.txt +eye. pg31100.txt, pg3200.txt, pg100.txt +eye." pg3200.txt +eye.' pg31100.txt, pg100.txt +eye: pg3200.txt, pg100.txt +eye; pg31100.txt, pg3200.txt, pg100.txt +eye? pg100.txt +eye?" pg31100.txt, pg3200.txt +eyebrows pg3200.txt +eyebrows, pg31100.txt, pg3200.txt +eyebrows. pg3200.txt, pg100.txt +eyebrows? pg100.txt +eyed pg31100.txt, pg3200.txt +eyed, pg100.txt +eyeing pg31100.txt +eyelashes. pg3200.txt +eyelid. pg100.txt +eyelids pg100.txt +eyes pg31100.txt, pg3200.txt, pg100.txt +eyes! 
pg3200.txt, pg100.txt +eyes) pg100.txt +eyes, pg31100.txt, pg3200.txt, pg100.txt +eyes,) pg3200.txt +eyes- pg100.txt +eyes--and pg31100.txt +eyes--humbly pg3200.txt +eyes--look pg3200.txt +eyes--we pg31100.txt +eyes. pg31100.txt, pg3200.txt, pg100.txt +eyes." pg31100.txt, pg3200.txt +eyes.' pg3200.txt, pg100.txt +eyes: pg3200.txt, pg100.txt +eyes; pg31100.txt, pg3200.txt, pg100.txt +eyes? pg3200.txt, pg100.txt +eyes?" pg31100.txt, pg3200.txt +eyes] pg100.txt +eyes_. pg31100.txt +eyesight pg3200.txt +eyesight? pg100.txt +eying pg3200.txt +eyne pg100.txt +eyne! pg100.txt +eyne, pg100.txt +eyne. pg100.txt +eyne? pg100.txt +ezekiel, pg3200.txt +f'm pg3200.txt +f--, pg31100.txt +f----" pg3200.txt +f. pg31100.txt, pg3200.txt +f." pg31100.txt, pg3200.txt +f., pg3200.txt +f.] pg3200.txt +fa. pg100.txt +fabian pg100.txt +fabian. pg100.txt +fable pg3200.txt +fable--that pg31100.txt +fable. pg100.txt +fable: pg100.txt +fable; pg3200.txt +fabled pg3200.txt +fables pg3200.txt +fables, pg3200.txt +fabric pg3200.txt +fabric, pg3200.txt +fabrics pg3200.txt +fac pg3200.txt +fac'." pg3200.txt +fac'd pg100.txt +facade, pg3200.txt +facchini; pg3200.txt +face pg31100.txt, pg3200.txt, pg100.txt +face! pg3200.txt, pg100.txt +face!' pg3200.txt +face, pg31100.txt, pg3200.txt, pg100.txt +face- pg100.txt +face-- pg100.txt +face--and pg3200.txt +face--it's pg3200.txt +face. pg31100.txt, pg3200.txt, pg100.txt +face." pg31100.txt, pg3200.txt +face: pg3200.txt, pg100.txt +face; pg3200.txt, pg100.txt +face? pg3200.txt, pg100.txt +face] pg100.txt +faced pg3200.txt +faces pg31100.txt, pg3200.txt, pg100.txt +faces! pg3200.txt, pg100.txt +faces, pg3200.txt, pg100.txt +faces. pg3200.txt, pg100.txt +faces.' pg3200.txt +faces; pg100.txt +faces? pg100.txt +faces] pg100.txt +facetious pg3200.txt +facetious. pg3200.txt +facetiousness pg3200.txt +facetiousness. pg3200.txt +fach'. pg3200.txt +facilitate pg3200.txt +facilities pg3200.txt +facility pg3200.txt +facility--pity pg3200.txt +facility. pg3200.txt, pg100.txt +facility: pg31100.txt, pg3200.txt, pg100.txt +facing pg3200.txt +facing. pg100.txt +facit pg100.txt +facons pg3200.txt +facsimile pg3200.txt +facsimiles pg3200.txt +fact pg31100.txt, pg3200.txt, pg100.txt +fact! pg3200.txt, pg100.txt +fact" pg3200.txt +fact, pg31100.txt, pg3200.txt, pg100.txt +fact,--only pg3200.txt +fact-- pg3200.txt +fact--and pg3200.txt +fact--fact pg3200.txt +fact--that pg3200.txt +fact. pg31100.txt, pg3200.txt, pg100.txt +fact." pg31100.txt, pg3200.txt +fact.' pg3200.txt +fact.] pg3200.txt +fact: pg3200.txt +fact; pg31100.txt, pg3200.txt +faction pg100.txt +faction! pg100.txt +faction, pg100.txt +faction. pg100.txt +faction; pg100.txt +faction? pg100.txt +factions. pg3200.txt, pg100.txt +factions; pg100.txt +factories pg3200.txt +factories, pg3200.txt +factories. pg3200.txt +factors pg3200.txt +factory pg3200.txt +factory, pg3200.txt +factory--" pg3200.txt +factory. pg3200.txt +factory." pg3200.txt +factory? pg3200.txt +factory?" pg3200.txt +factry." pg3200.txt +facts pg3200.txt +facts, pg3200.txt +facts--and pg3200.txt +facts. pg3200.txt +facts." pg3200.txt +facts.] pg3200.txt +facts: pg3200.txt +facts; pg3200.txt +facts?" pg3200.txt +faculties pg3200.txt, pg100.txt +faculties, pg3200.txt +faculties. pg3200.txt +faculties." pg31100.txt +faculty pg3200.txt +faculty, pg3200.txt +faculty. pg3200.txt, pg100.txt +faculty; pg3200.txt +fade pg3200.txt, pg100.txt +fade! pg100.txt +fade, pg3200.txt, pg100.txt +fade. 
pg3200.txt +faded pg3200.txt +faded, pg3200.txt, pg100.txt +fades pg3200.txt +fading pg3200.txt +fading, pg3200.txt +fads pg3200.txt +fag-end pg3200.txt +fagged pg3200.txt +fagged, pg31100.txt, pg3200.txt +fagged: pg3200.txt +fagnani pg3200.txt +fagots pg3200.txt +fahrenheit. pg3200.txt +fail pg31100.txt, pg3200.txt, pg100.txt +fail'd pg100.txt +fail'd, pg100.txt +fail'd. pg100.txt +fail, pg3200.txt, pg100.txt +fail--" pg3200.txt +fail. pg31100.txt, pg3200.txt, pg100.txt +fail." pg3200.txt +fail; pg100.txt +fail? pg100.txt +failed pg3200.txt +failed! pg3200.txt +failed, pg31100.txt, pg3200.txt +failed--and pg3200.txt +failed--half pg3200.txt +failed. pg31100.txt, pg3200.txt +failed." pg3200.txt +failed: pg3200.txt +failed; pg3200.txt +failing pg31100.txt, pg3200.txt, pg100.txt +failing, pg100.txt +failing. pg3200.txt +failings pg31100.txt, pg3200.txt +failings, pg3200.txt +fails pg3200.txt, pg100.txt +fails, pg3200.txt, pg100.txt +fails. pg3200.txt, pg100.txt +failure pg3200.txt +failure, pg31100.txt, pg3200.txt +failure--he pg3200.txt +failure. pg3200.txt +failure; pg3200.txt +failures pg3200.txt +failures, pg3200.txt +failures. pg3200.txt +failures; pg3200.txt +fain pg100.txt +fain; pg100.txt +faint pg31100.txt, pg3200.txt +faint, pg3200.txt +faint--" pg31100.txt +faint--and pg3200.txt +faint. pg3200.txt, pg100.txt +faint." pg3200.txt +faint: pg3200.txt +faint; pg3200.txt, pg100.txt +faint?" pg3200.txt +fainted pg3200.txt +fainted, pg100.txt +fainted. pg3200.txt +fainted; pg3200.txt +fainter pg31100.txt, pg3200.txt +faintest pg3200.txt +fainting pg3200.txt +fainting. pg3200.txt +fainting: pg31100.txt +faintly pg3200.txt +faintly: pg3200.txt +faintly? pg100.txt +faints pg100.txt +faints. pg100.txt +faints] pg100.txt +fair pg31100.txt, pg3200.txt, pg100.txt +fair! pg100.txt +fair!" pg3200.txt +fair!- pg100.txt +fair'- pg100.txt +fair, pg31100.txt, pg3200.txt, pg100.txt +fair," pg3200.txt +fair-fac'd, pg100.txt +fair-judgment, pg100.txt +fair-minded pg3200.txt +fair-minded, pg3200.txt +fair. pg31100.txt, pg3200.txt, pg100.txt +fair." pg31100.txt, pg3200.txt +fair.' pg3200.txt +fair: pg100.txt +fair; pg3200.txt, pg100.txt +fair? pg100.txt +fair?" pg31100.txt, pg3200.txt +fairchild, pg3200.txt +faire pg3200.txt +fairer pg100.txt +fairest pg100.txt +fairest, pg3200.txt, pg100.txt +fairest,--according pg3200.txt +fairfax pg31100.txt, pg3200.txt +fairfax! pg31100.txt +fairfax!" pg31100.txt +fairfax's pg31100.txt +fairfax, pg31100.txt +fairfax--" pg31100.txt +fairfax. pg31100.txt +fairfax." pg31100.txt +fairfax.-- pg31100.txt +fairfax; pg31100.txt +fairfax?" pg31100.txt +fairies pg3200.txt, pg100.txt +fairies' pg3200.txt +fairies, pg3200.txt, pg100.txt +fairies--come pg3200.txt +fairies. pg3200.txt, pg100.txt +fairies?" pg3200.txt +fairly pg31100.txt, pg3200.txt, pg100.txt +fairly. pg3200.txt +fairly? pg3200.txt +fairlyand--such pg3200.txt +fairness pg3200.txt +fairness, pg3200.txt +fairs. pg100.txt +fairs; pg3200.txt, pg100.txt +fairy pg3200.txt, pg100.txt +fairy- pg3200.txt +fairy-banishing pg3200.txt +fairy. pg100.txt +fairyland pg3200.txt +fairyland! pg3200.txt +fairyland. pg3200.txt +fairyland; pg3200.txt +faith pg31100.txt, pg3200.txt, pg100.txt +faith! pg3200.txt, pg100.txt +faith, pg3200.txt, pg100.txt +faith- pg100.txt +faith-breach; pg100.txt +faith-curists, pg3200.txt +faith. pg3200.txt, pg100.txt +faith." pg3200.txt +faith: pg100.txt +faith; pg3200.txt, pg100.txt +faith? pg100.txt +faithful pg31100.txt, pg3200.txt +faithful! 
pg100.txt +faithful, pg3200.txt +faithfuler pg3200.txt +faithfuller, pg3200.txt +faithfully pg31100.txt, pg3200.txt +faithfully, pg31100.txt, pg3200.txt +faithfully. pg3200.txt, pg100.txt +faithfully; pg100.txt +faithfully? pg3200.txt, pg100.txt +faithfulness pg3200.txt +faithfulness. pg3200.txt +faiths, pg100.txt +faitors! pg100.txt +fakeer, pg3200.txt +falaise. pg3200.txt +falchion pg100.txt +falchion. pg100.txt +falcon, pg3200.txt +falconbridge, pg100.txt +falconbridge. pg100.txt +falconers, pg100.txt +falkenhayn pg3200.txt +fall pg31100.txt, pg3200.txt, pg100.txt +fall! pg100.txt +fall'n pg100.txt +fall'n, pg100.txt +fall'n. pg100.txt +fall, pg3200.txt, pg100.txt +fall," pg3200.txt +fall- pg100.txt +fall. pg3200.txt, pg100.txt +fall." pg3200.txt +fall..... pg3200.txt +fall; pg31100.txt, pg100.txt +fall? pg100.txt +fall?" pg3200.txt +fall] pg100.txt +fallacy. pg100.txt +fallen pg31100.txt, pg3200.txt +fallen!" pg3200.txt +fallen!' pg3200.txt +fallen, pg31100.txt, pg3200.txt +fallibility: pg3200.txt +falling pg31100.txt, pg3200.txt, pg100.txt +falling, pg3200.txt +falling. pg3200.txt +falling?' pg3200.txt +fallkill pg3200.txt +fallkill, pg3200.txt +fallkill. pg3200.txt +fallkill." pg3200.txt +fallkill? pg3200.txt +fallkill?" pg3200.txt +falls pg3200.txt, pg100.txt +falls, pg31100.txt, pg100.txt +falls- pg100.txt +falls--why, pg3200.txt +falls. pg3200.txt, pg100.txt +falls.] pg100.txt +falls; pg100.txt +falls] pg100.txt +fallut-il pg3200.txt +falmouth-london,...............350 pg3200.txt +false pg31100.txt, pg3200.txt, pg100.txt +false! pg31100.txt, pg100.txt +false, pg3200.txt, pg100.txt +false--succor pg3200.txt +false. pg3200.txt, pg100.txt +false." pg31100.txt, pg3200.txt +false; pg100.txt +false? pg100.txt +false?" pg3200.txt +falsehood pg31100.txt, pg3200.txt, pg100.txt +falsehood! pg100.txt +falsehood!"--and pg31100.txt +falsehood), pg3200.txt +falsehood, pg3200.txt, pg100.txt +falsehood- pg100.txt +falsehood. pg31100.txt, pg3200.txt, pg100.txt +falsehood." pg31100.txt +falsehood."--exchange. pg3200.txt +falsehoods, pg3200.txt +falsely pg3200.txt, pg100.txt +falsely, pg3200.txt +falsely. pg100.txt +falseness; pg100.txt +falser: pg100.txt +falsing. pg100.txt +falstaff pg3200.txt, pg100.txt +falstaff! pg100.txt +falstaff's pg100.txt +falstaff's.' pg100.txt +falstaff, pg100.txt +falstaff. pg100.txt +falstaff.' pg100.txt +falstaff? pg100.txt +falstaff] pg100.txt +falstaffs, pg100.txt +falter pg3200.txt +faltered pg3200.txt +faltering pg3200.txt +fam'd. pg100.txt +fambly pg3200.txt +fame pg3200.txt, pg100.txt +fame! pg100.txt +fame, pg3200.txt, pg100.txt +fame. pg3200.txt, pg100.txt +fame." pg3200.txt +fame: pg100.txt +fame; pg100.txt +familiar pg3200.txt, pg100.txt +familiar! pg100.txt +familiar, pg3200.txt, pg100.txt +familiar. pg3200.txt +familiar." pg3200.txt +familiar; pg3200.txt +familiarised pg31100.txt +familiarity pg31100.txt, pg3200.txt, pg100.txt +familiarity- pg100.txt +familiarity. pg3200.txt +familiarly pg31100.txt, pg3200.txt +familiarly. pg3200.txt +familiarly." pg3200.txt +familiars, pg100.txt +families pg31100.txt, pg3200.txt +families, pg31100.txt, pg3200.txt +families. pg31100.txt, pg3200.txt +families." pg31100.txt +families: pg3200.txt +families; pg31100.txt +family pg31100.txt, pg3200.txt +family! pg31100.txt +family!" 
pg31100.txt, pg3200.txt +family's pg3200.txt +family, pg31100.txt, pg3200.txt, pg100.txt +family-- pg3200.txt +family--" pg3200.txt +family--and pg31100.txt +family--don't pg3200.txt +family--dresses pg3200.txt +family--one pg3200.txt +family--personally." pg3200.txt +family--the pg3200.txt +family-estate--was pg31100.txt +family-piece. pg31100.txt +family. pg31100.txt, pg3200.txt, pg100.txt +family." pg31100.txt, pg3200.txt +family: pg3200.txt +family; pg31100.txt, pg3200.txt, pg100.txt +family? pg31100.txt, pg3200.txt, pg100.txt +family?" pg31100.txt, pg3200.txt +familywise pg3200.txt +famine pg3200.txt +famine, pg3200.txt, pg100.txt +famine. pg3200.txt, pg100.txt +famine." pg3200.txt +famine; pg3200.txt +famish pg3200.txt +famish'd, pg100.txt +famish? pg100.txt +famished pg3200.txt +famished; pg3200.txt +famishing pg3200.txt +famishing--but pg3200.txt +famous pg31100.txt, pg3200.txt +famous, pg100.txt +famous. pg3200.txt +famous." pg3200.txt +famously pg100.txt +famously." pg31100.txt +fan pg31100.txt, pg3200.txt, pg100.txt +fan! pg100.txt +fan, pg100.txt +fan-tods." pg3200.txt +fan. pg3200.txt +fan?" pg31100.txt +fanatic. pg3200.txt +fanatics. pg3200.txt +fancied pg31100.txt, pg3200.txt +fancied,) pg3200.txt +fancied. pg31100.txt +fancies pg31100.txt, pg3200.txt, pg100.txt +fancies, pg100.txt +fancies. pg31100.txt, pg100.txt +fancies; pg3200.txt +fancies? pg3200.txt +fanciful pg3200.txt +fancy pg31100.txt, pg3200.txt, pg100.txt +fancy, pg31100.txt, pg3200.txt, pg100.txt +fancy,) pg3200.txt +fancy-free. pg100.txt +fancy-topped pg3200.txt +fancy. pg31100.txt, pg3200.txt, pg100.txt +fancy." pg31100.txt, pg3200.txt +fancy; pg100.txt +fancy?" pg31100.txt +fancying pg31100.txt, pg3200.txt +fandango pg3200.txt +fane pg3200.txt +fane; pg3200.txt +fang pg100.txt +fang'd, pg100.txt +fangs!" pg3200.txt +fangs. pg3200.txt, pg100.txt +fangs; pg100.txt +fanning pg3200.txt, pg100.txt +fanning. pg100.txt +fanny pg31100.txt, pg3200.txt +fanny! pg31100.txt +fanny's pg31100.txt +fanny). pg31100.txt +fanny, pg31100.txt +fanny," pg31100.txt +fanny. pg31100.txt +fanny." pg31100.txt +fanny: pg31100.txt +fanny; pg31100.txt +fanny? pg31100.txt +fanny?" pg31100.txt +fans pg3200.txt +fans. pg3200.txt +fanshaw pg3200.txt +fanshaw. pg3200.txt +fantasied; pg100.txt +fantasies, pg100.txt +fantasies. pg100.txt +fantastic pg3200.txt, pg100.txt +fantastical, pg100.txt +fantastical. pg100.txt +fantasticoes- pg100.txt +fantastics. pg3200.txt +fantasy pg100.txt +fantasy! pg100.txt +fantasy, pg3200.txt, pg100.txt +fantasy. pg100.txt +fantasy; pg100.txt +fantasy? pg100.txt +fantods. pg3200.txt +far pg31100.txt, pg3200.txt, pg100.txt +far! pg100.txt +far!" pg3200.txt +far, pg31100.txt, pg3200.txt, pg100.txt +far- pg3200.txt, pg100.txt +far-fetched, pg3200.txt +far-off pg3200.txt +far-reaching pg3200.txt +far-seeing pg3200.txt +far-shooting pg3200.txt +far-spread pg3200.txt +far-stretching pg3200.txt +far. pg31100.txt, pg3200.txt, pg100.txt +far." pg31100.txt, pg3200.txt +far; pg31100.txt, pg3200.txt +far? pg100.txt +far?" pg3200.txt +farce pg3200.txt +farce. pg3200.txt +farcically pg3200.txt +farcillo! pg3200.txt +farcillo, pg3200.txt +farcillo. pg3200.txt +fardel pg100.txt +fare pg3200.txt, pg100.txt +fare, pg31100.txt, pg3200.txt, pg100.txt +fare," pg3200.txt +fare--endu--rance pg3200.txt +fare. pg31100.txt, pg3200.txt +fare." pg3200.txt +fare; pg3200.txt +fare? pg100.txt +fared pg3200.txt +fared. pg3200.txt +fares pg3200.txt +fares. pg100.txt +farewell pg31100.txt, pg3200.txt, pg100.txt +farewell! 
pg3200.txt, pg100.txt +farewell, pg31100.txt, pg3200.txt, pg100.txt +farewell--of pg31100.txt +farewell. pg3200.txt, pg100.txt +farewell." pg3200.txt, pg100.txt +farewell.- pg100.txt +farewell; pg100.txt +farewells, pg3200.txt +farewells; pg100.txt +farfetched pg3200.txt +farm pg3200.txt, pg100.txt +farm!--_you_ pg31100.txt +farm)--atlantic. pg3200.txt +farm). pg3200.txt +farm, pg31100.txt, pg3200.txt +farm-- pg3200.txt +farm-house, pg3200.txt +farm-yard. pg31100.txt +farm. pg3200.txt, pg100.txt +farm." pg3200.txt +farm: pg3200.txt +farm; pg31100.txt +farm?" pg31100.txt, pg3200.txt +farm] pg3200.txt +farmeopath pg3200.txt +farmeopath. pg3200.txt +farmer pg3200.txt +farmer's pg31100.txt +farmer, pg31100.txt, pg3200.txt +farmer--especially pg3200.txt +farmer. pg3200.txt +farmer; pg3200.txt +farmers pg3200.txt +farmers, pg3200.txt +farmhand pg3200.txt +farmhouse pg3200.txt +farmhouse; pg3200.txt +farmhouses, pg3200.txt +farming pg3200.txt +farms pg3200.txt +farms, pg3200.txt, pg100.txt +farms. pg3200.txt +fart, pg3200.txt +farther pg31100.txt, pg3200.txt, pg100.txt +farther, pg31100.txt, pg3200.txt, pg100.txt +farther. pg31100.txt, pg3200.txt, pg100.txt +farther." pg31100.txt +farther.--she pg31100.txt +farther; pg31100.txt, pg100.txt +farther?" pg31100.txt +farthest pg3200.txt +farthest, pg100.txt +farthest-gone pg3200.txt +farthest. pg100.txt +farthest? pg100.txt +farthing pg31100.txt, pg3200.txt +farthing--next!"--when pg3200.txt +farthing. pg3200.txt, pg100.txt +farthingale.' pg100.txt +farthingale? pg100.txt +farthings, pg3200.txt +farthings." pg3200.txt +fascinated pg3200.txt +fascinating pg3200.txt +fascinating. pg3200.txt +fascination pg31100.txt, pg3200.txt +fascination! pg3200.txt +fascination. pg3200.txt +fascination: pg31100.txt +fascinations pg3200.txt +fascinations." pg3200.txt +fascinations; pg3200.txt +fashion pg31100.txt, pg3200.txt, pg100.txt +fashion! pg31100.txt, pg100.txt +fashion'd pg100.txt +fashion, pg31100.txt, pg3200.txt, pg100.txt +fashion-- pg3200.txt +fashion--and pg3200.txt +fashion--in pg3200.txt +fashion. pg31100.txt, pg3200.txt, pg100.txt +fashion." pg31100.txt +fashion: pg3200.txt +fashion; pg3200.txt, pg100.txt +fashion? pg100.txt +fashionable pg31100.txt, pg3200.txt +fashionable, pg3200.txt +fashionables. pg3200.txt +fashionably pg3200.txt +fashioned pg3200.txt +fashions, pg100.txt +fashions. pg31100.txt +fast pg31100.txt, pg3200.txt, pg100.txt +fast! pg3200.txt +fast!" pg3200.txt +fast, pg31100.txt, pg3200.txt, pg100.txt +fast. pg3200.txt, pg100.txt +fast." pg31100.txt, pg3200.txt +fast.' pg3200.txt +fast; pg3200.txt, pg100.txt +fast? pg100.txt +fast?' pg3200.txt +fasted. pg3200.txt +fasten pg31100.txt, pg3200.txt +fasten, pg3200.txt +fastened pg3200.txt +fastener, pg31100.txt +faster pg3200.txt +faster, pg100.txt +faster. pg3200.txt, pg100.txt +fastest pg3200.txt +fastidious pg3200.txt +fasting pg3200.txt +fasting, pg100.txt +fasting?" pg3200.txt +fastnesses pg3200.txt +fastolfe pg3200.txt, pg100.txt +fastolfe, pg100.txt +fat pg31100.txt, pg3200.txt, pg100.txt +fat, pg3200.txt, pg100.txt +fat. pg31100.txt, pg100.txt +fatal pg3200.txt, pg100.txt +fatal, pg3200.txt, pg100.txt +fatal. pg3200.txt +fatal." pg3200.txt +fatality pg3200.txt +fatally pg3200.txt +fate pg31100.txt, pg3200.txt, pg100.txt +fate! pg3200.txt, pg100.txt +fate, pg31100.txt, pg3200.txt, pg100.txt +fate. pg31100.txt, pg3200.txt, pg100.txt +fate." pg31100.txt, pg3200.txt +fate: pg3200.txt +fate; pg3200.txt +fate? 
pg31100.txt +fated pg31100.txt, pg100.txt +fates pg31100.txt, pg3200.txt, pg100.txt +fates! pg100.txt +fates.' pg100.txt +fates: pg100.txt +father pg31100.txt, pg3200.txt, pg100.txt +father! pg31100.txt, pg3200.txt, pg100.txt +father!" pg31100.txt, pg3200.txt +father!- pg100.txt +father!--" pg3200.txt +father"-- pg3200.txt +father'- pg100.txt +father's pg31100.txt, pg3200.txt, pg100.txt +father's, pg31100.txt +father's. pg31100.txt, pg100.txt +father's." pg31100.txt +father's; pg31100.txt, pg3200.txt, pg100.txt +father's? pg100.txt +father's?" pg3200.txt +father) pg31100.txt +father, pg31100.txt, pg3200.txt, pg100.txt +father- pg100.txt +father--" pg3200.txt +father--? pg31100.txt +father--but pg31100.txt +father--could pg31100.txt +father--glad pg31100.txt +father-in-law pg31100.txt +father-in-law! pg100.txt +father-in-law, pg3200.txt +father-in-law. pg100.txt +father. pg31100.txt, pg3200.txt, pg100.txt +father." pg31100.txt, pg3200.txt +father.--you pg31100.txt +father: pg31100.txt, pg3200.txt, pg100.txt +father; pg31100.txt, pg3200.txt, pg100.txt +father? pg3200.txt, pg100.txt +father?" pg31100.txt, pg3200.txt +fatherland, pg3200.txt +fatherless. pg100.txt +fathers pg3200.txt, pg100.txt +fathers, pg31100.txt, pg3200.txt, pg100.txt +fathers-men pg100.txt +fathers. pg3200.txt +fathom pg100.txt +fathom. pg100.txt +fathom." pg3200.txt +fathomless pg3200.txt, pg100.txt +fathoms. pg3200.txt +fatigate, pg100.txt +fatigue pg31100.txt, pg3200.txt +fatigue! pg3200.txt +fatigue, pg31100.txt, pg3200.txt +fatigue--for, pg3200.txt +fatigue. pg3200.txt +fatigue." pg31100.txt +fatigued pg31100.txt +fatigued. pg31100.txt, pg3200.txt +fatigues pg31100.txt, pg3200.txt +fatigues, pg31100.txt, pg3200.txt +fatiguing pg31100.txt, pg3200.txt +fatiguing, pg3200.txt +fatiguing. pg3200.txt +fatten pg3200.txt +fattened. pg3200.txt +fattening pg3200.txt +fatuus pg100.txt +faucets pg3200.txt +fauconbridge, pg100.txt +faulconbridge pg100.txt +faulconbridge, pg100.txt +faulconbridge- pg100.txt +faulconbridge. pg100.txt +faulconbridge? pg100.txt +fault pg31100.txt, pg3200.txt, pg100.txt +fault! pg100.txt +fault, pg31100.txt, pg3200.txt, pg100.txt +fault- pg100.txt +fault--because pg31100.txt +fault--it pg3200.txt +fault--skirting pg3200.txt +fault--there pg3200.txt +fault-finder. pg3200.txt +fault-finding. pg31100.txt +fault-finding; pg3200.txt +fault. pg31100.txt, pg3200.txt, pg100.txt +fault." pg3200.txt +fault.' pg3200.txt +fault; pg31100.txt, pg3200.txt, pg100.txt +fault? pg100.txt +faultfinding, pg3200.txt +faultiness. pg100.txt +faultless pg3200.txt +faultless, pg3200.txt +faults pg31100.txt, pg3200.txt, pg100.txt +faults, pg31100.txt, pg3200.txt, pg100.txt +faults. pg31100.txt, pg3200.txt, pg100.txt +faults.' pg100.txt +faults; pg31100.txt, pg3200.txt, pg100.txt +faults? pg100.txt +faulty. pg31100.txt +faun pg3200.txt +faustuses. pg100.txt +faut pg3200.txt +fauteuils pg3200.txt +favor pg3200.txt, pg100.txt +favor, pg3200.txt +favor--" pg3200.txt +favor. pg3200.txt, pg100.txt +favor." pg3200.txt +favor: pg3200.txt +favor; pg3200.txt +favor? pg3200.txt +favor?" pg3200.txt +favorable pg3200.txt +favorable. pg3200.txt +favorably pg3200.txt +favorably. pg3200.txt +favored pg3200.txt +favoring pg3200.txt +favorite pg3200.txt +favorite, pg3200.txt +favorite. pg3200.txt +favorites; pg3200.txt +favors pg3200.txt +favour pg31100.txt, pg100.txt +favour! pg100.txt +favour, pg31100.txt, pg3200.txt, pg100.txt +favour. pg31100.txt, pg100.txt +favour." pg31100.txt +favour.--for pg31100.txt +favour.] 
pg100.txt +favour: pg31100.txt +favour; pg31100.txt, pg100.txt +favour? pg100.txt +favourable pg31100.txt +favourable! pg31100.txt +favourable, pg31100.txt +favourably, pg31100.txt +favourer pg100.txt +favouring pg31100.txt +favourite pg31100.txt +favourite, pg31100.txt +favourite--always pg31100.txt +favourite. pg31100.txt +favourite." pg31100.txt +favourites, pg100.txt +favourites. pg31100.txt +favourites." pg31100.txt +favours pg31100.txt, pg100.txt +favours! pg100.txt +favours, pg31100.txt, pg100.txt +favours- pg100.txt +favours. pg100.txt +favours; pg31100.txt +favours? pg100.txt +favout pg100.txt +fawn, pg100.txt +fawned pg3200.txt +fay pg3200.txt +fay, pg3200.txt +fay. pg3200.txt +fayal pg3200.txt +fayal. pg3200.txt +fe-el-low, pg3200.txt +fealty pg3200.txt +fealty. pg100.txt +fealty] pg100.txt +fear pg31100.txt, pg3200.txt, pg100.txt +fear! pg100.txt +fear!" pg3200.txt +fear'd pg100.txt +fear'd, pg100.txt +fear'd; pg100.txt +fear'st pg100.txt +fear'st. pg100.txt +fear, pg31100.txt, pg3200.txt, pg100.txt +fear,' pg3200.txt +fear- pg100.txt +fear--yes, pg3200.txt +fear-bound. pg3200.txt +fear. pg3200.txt, pg100.txt +fear." pg3200.txt +fear: pg100.txt +fear; pg31100.txt, pg3200.txt, pg100.txt +fear;' pg3200.txt +fear? pg100.txt +fear?' pg100.txt +feared pg31100.txt, pg3200.txt, pg100.txt +feared, pg31100.txt, pg3200.txt +feared. pg3200.txt +feared.] pg3200.txt +fearful pg31100.txt, pg3200.txt, pg100.txt +fearful, pg31100.txt, pg100.txt +fearful. pg100.txt +fearful: pg100.txt +fearful? pg100.txt +fearfully pg31100.txt, pg3200.txt +fearing pg31100.txt, pg3200.txt +fearing, pg31100.txt +fearing. pg100.txt +fearless pg3200.txt +fearless, pg31100.txt +fearlessly pg3200.txt +fearlessness pg3200.txt +fears pg31100.txt, pg3200.txt, pg100.txt +fears, pg31100.txt, pg3200.txt, pg100.txt +fears- pg100.txt +fears--could pg31100.txt +fears. pg3200.txt, pg100.txt +fears." pg3200.txt +fears: pg100.txt +fears; pg100.txt +fears? pg100.txt +feasible, pg31100.txt, pg3200.txt +feast pg3200.txt, pg100.txt +feast, pg100.txt +feast- pg100.txt +feast-day. pg3200.txt +feast-day?" pg3200.txt +feast. pg3200.txt, pg100.txt +feast: pg3200.txt, pg100.txt +feast; pg3200.txt, pg100.txt +feast? pg100.txt +feasted pg3200.txt +feasted; pg3200.txt +feasting pg3200.txt +feasting, pg3200.txt +feasting. pg100.txt +feasts pg3200.txt, pg100.txt +feasts, pg3200.txt, pg100.txt +feasts. pg3200.txt, pg100.txt +feat pg3200.txt, pg100.txt +feat, pg100.txt +feat. pg3200.txt, pg100.txt +feat; pg3200.txt +feather pg3200.txt, pg100.txt +feather, pg3200.txt, pg100.txt +feather-headed pg3200.txt +feather. pg100.txt +feather." pg31100.txt +feather; pg100.txt +feathers pg3200.txt +feathers, pg3200.txt +feathers. pg3200.txt +feathers; pg3200.txt +featherstitch pg3200.txt +featly. pg100.txt +feats pg3200.txt, pg100.txt +feats, pg100.txt +feats. pg100.txt +feats? pg100.txt +featur'd, pg100.txt +feature pg31100.txt, pg3200.txt, pg100.txt +feature, pg31100.txt, pg100.txt +feature. pg31100.txt, pg3200.txt, pg100.txt +featureless, pg3200.txt +features pg31100.txt, pg3200.txt +features, pg31100.txt, pg3200.txt +features. pg3200.txt +features? pg100.txt +feb'uary." pg3200.txt +feb. pg3200.txt +feb.--. pg3200.txt +february pg31100.txt, pg3200.txt +february, pg31100.txt, pg3200.txt +february--i pg3200.txt +february. pg3200.txt +february." pg31100.txt +february?" pg3200.txt +fecks! pg100.txt +fecundity, pg3200.txt +fecundity. pg3200.txt +fed pg3200.txt, pg100.txt +fed, pg3200.txt, pg100.txt +fed. 
pg100.txt +fed; pg100.txt +fedary pg100.txt +fee pg31100.txt, pg3200.txt, pg100.txt +fee, pg3200.txt, pg100.txt +fee- pg100.txt +fee-grief pg100.txt +fee-simple pg100.txt +fee. pg3200.txt, pg100.txt +fee; pg100.txt +feeble pg31100.txt, pg3200.txt, pg100.txt +feeble! pg100.txt +feeble, pg3200.txt, pg100.txt +feeble. pg100.txt +feeble." pg31100.txt +feeble; pg3200.txt +feeble? pg100.txt +feebled pg100.txt +feebleness. pg100.txt +feeblest pg3200.txt +feebly pg3200.txt +feebly: pg3200.txt +feebly:) pg3200.txt +feed pg31100.txt, pg3200.txt, pg100.txt +feed! pg100.txt +feed, pg3200.txt, pg100.txt +feed. pg3200.txt, pg100.txt +feed? pg100.txt +feeder, pg100.txt +feeder; pg100.txt +feeders pg100.txt +feeders? pg100.txt +feeding pg31100.txt, pg3200.txt +feeding, pg100.txt +feeding. pg100.txt +feeding; pg100.txt +feeds pg100.txt +feeds, pg100.txt +feel pg31100.txt, pg3200.txt, pg100.txt +feel! pg31100.txt, pg3200.txt +feel't pg100.txt +feel, pg31100.txt, pg3200.txt, pg100.txt +feel. pg31100.txt, pg3200.txt, pg100.txt +feel." pg31100.txt, pg3200.txt +feel; pg3200.txt +feel? pg31100.txt +feel?)" pg3200.txt +feeler, pg3200.txt +feelers--and pg3200.txt +feeling pg31100.txt, pg3200.txt, pg100.txt +feeling!" pg3200.txt +feeling, pg31100.txt, pg3200.txt +feeling-- pg3200.txt +feeling--from pg31100.txt +feeling--she pg3200.txt +feeling. pg31100.txt, pg3200.txt +feeling." pg31100.txt, pg3200.txt +feeling: pg3200.txt +feeling; pg31100.txt, pg3200.txt +feeling? pg3200.txt +feelingly pg3200.txt +feelingly, pg3200.txt +feelingly. pg100.txt +feelingly: pg3200.txt +feelings pg31100.txt, pg3200.txt +feelings! pg31100.txt +feelings!" pg31100.txt +feelings, pg31100.txt, pg3200.txt +feelings," pg31100.txt +feelings. pg31100.txt, pg3200.txt +feelings." pg31100.txt +feelings: pg3200.txt +feelings; pg31100.txt, pg3200.txt +feelings?" pg31100.txt +feelings?--i pg31100.txt +feels pg31100.txt, pg3200.txt, pg100.txt +feels, pg3200.txt, pg100.txt +feels. pg3200.txt +fees pg31100.txt, pg3200.txt, pg100.txt +fees, pg31100.txt, pg3200.txt, pg100.txt +fees. pg3200.txt, pg100.txt +fees; pg100.txt +fees? pg100.txt +feet pg31100.txt, pg3200.txt, pg100.txt +feet! pg3200.txt, pg100.txt +feet!" pg3200.txt +feet, pg3200.txt, pg100.txt +feet-- pg3200.txt +feet--" pg3200.txt +feet--beurre pg3200.txt +feet--called pg3200.txt +feet--may pg3200.txt +feet--what pg3200.txt +feet-high, pg3200.txt +feet. pg31100.txt, pg3200.txt, pg100.txt +feet." pg3200.txt +feet.' pg3200.txt +feet.] pg100.txt +feet: pg3200.txt +feet; pg3200.txt, pg100.txt +feet? pg100.txt +feet?" pg3200.txt +fegs, pg3200.txt +feign pg3200.txt +feign, pg100.txt +feign. pg100.txt +feigning, pg100.txt +feinde; pg3200.txt +feirn, pg3200.txt +fel-low, pg3200.txt +felicitate pg100.txt +felicities pg31100.txt, pg3200.txt +felicities! pg31100.txt +felicities, pg31100.txt +felicities. pg3200.txt +felicitous pg3200.txt +felicitously pg3200.txt +felicity pg31100.txt, pg3200.txt +felicity! pg31100.txt, pg3200.txt +felicity, pg31100.txt +felicity--to pg31100.txt +felicity. pg31100.txt, pg3200.txt, pg100.txt +felicity." pg31100.txt +felicity?" pg31100.txt +fell pg31100.txt, pg3200.txt, pg100.txt +fell! pg3200.txt +fell, pg31100.txt, pg3200.txt, pg100.txt +fell--when pg3200.txt +fell. pg31100.txt, pg3200.txt, pg100.txt +fell: pg3200.txt +fell; pg3200.txt +felle, pg3200.txt +felled pg3200.txt +feller pg3200.txt +feller, pg3200.txt +fellers pg3200.txt +felling pg3200.txt +fellow pg31100.txt, pg3200.txt, pg100.txt +fellow! pg31100.txt, pg3200.txt, pg100.txt +fellow!' 
pg100.txt +fellow's pg3200.txt +fellow, pg31100.txt, pg3200.txt, pg100.txt +fellow," pg3200.txt +fellow- pg100.txt +fellow--" pg3200.txt +fellow--admirable!" pg3200.txt +fellow--left pg3200.txt +fellow-artists, pg3200.txt +fellow-being pg3200.txt +fellow-being. pg3200.txt +fellow-citizen pg3200.txt +fellow-conspirator. pg3200.txt +fellow-craftsman-- pg3200.txt +fellow-feeling pg3200.txt +fellow-member pg3200.txt +fellow-ministers pg100.txt +fellow-missourian pg3200.txt +fellow-passenger pg3200.txt +fellow-pilot pg3200.txt +fellow-slaves pg3200.txt +fellow-traveler. pg3200.txt +fellow-unfortunate pg3200.txt +fellow-voyagers, pg3200.txt +fellow. pg31100.txt, pg3200.txt, pg100.txt +fellow." pg31100.txt, pg3200.txt +fellow.' pg100.txt +fellow; pg3200.txt, pg100.txt +fellow? pg100.txt +fellowman." pg3200.txt +fellows pg31100.txt, pg3200.txt, pg100.txt +fellows! pg3200.txt +fellows, pg3200.txt, pg100.txt +fellows- pg100.txt +fellows. pg3200.txt, pg100.txt +fellows." pg3200.txt +fellows; pg3200.txt, pg100.txt +fellows? pg3200.txt +fellowship pg3200.txt, pg100.txt +fellowship! pg100.txt +fellowship, pg3200.txt, pg100.txt +fellowship- pg100.txt +fellowship. pg3200.txt, pg100.txt +fells, pg100.txt +felony, pg100.txt +felow, pg31100.txt +felsenthor, pg3200.txt +felt pg31100.txt, pg3200.txt, pg100.txt +felt!--a pg31100.txt +felt, pg31100.txt, pg3200.txt, pg100.txt +felt--it pg31100.txt +felt. pg31100.txt, pg3200.txt, pg100.txt +felt." pg31100.txt +felt: pg3200.txt +felt; pg31100.txt, pg3200.txt +female pg31100.txt, pg3200.txt, pg100.txt +female, pg3200.txt +female. pg31100.txt +female; pg100.txt +females pg31100.txt +females": pg3200.txt +females." pg31100.txt +feminine--was pg31100.txt +feminine." pg3200.txt +fen pg100.txt +fen. pg100.txt +fence pg3200.txt, pg100.txt +fence! pg100.txt +fence, pg3200.txt +fence- pg3200.txt +fence. pg3200.txt +fence." pg3200.txt +fence: pg3200.txt +fence; pg3200.txt +fence?" pg3200.txt +fenced pg31100.txt, pg3200.txt +fencer pg100.txt +fences pg3200.txt +fences, pg3200.txt +fences--these pg3200.txt +fences. pg3200.txt +fencing pg3200.txt +fencing; pg3200.txt +fended pg3200.txt +fenton pg100.txt +fenton! pg100.txt +fenton, pg100.txt +fenton. pg100.txt +fer pg3200.txt +fer. pg100.txt +ferdinand pg100.txt +ferdinand! pg100.txt +ferdinand, pg100.txt +ferdinand. pg100.txt +fere pg100.txt +ferguson pg3200.txt +ferguson. pg3200.txt +ferguson." pg3200.txt +ferguson?" pg3200.txt +feringhea pg3200.txt +feringhea. pg3200.txt +ferito. pg3200.txt +ferment pg3200.txt +ferment, pg3200.txt +ferne pg3200.txt +ferns pg3200.txt +ferns. pg3200.txt +ferocious pg3200.txt +ferrara. pg100.txt +ferrars pg31100.txt +ferrars!" pg31100.txt +ferrars's pg31100.txt +ferrars's, pg31100.txt +ferrars, pg31100.txt +ferrars. pg31100.txt +ferrars." pg31100.txt +ferrars._] pg31100.txt +ferrars?" pg31100.txt +ferrers, pg100.txt +ferret pg3200.txt +ferry pg3200.txt, pg100.txt +ferry, pg3200.txt +ferry-boat pg3200.txt +ferry-landing, pg3200.txt +ferry. pg3200.txt +ferryboat pg3200.txt +ferryboat's pg3200.txt +ferryboat, pg3200.txt +ferrying. pg3200.txt +fertile pg3200.txt +fertile, pg3200.txt +fertile. pg100.txt +fertility. pg3200.txt, pg100.txt +fertilized pg3200.txt +fertilizing pg3200.txt +fervency pg3200.txt +fervency: pg3200.txt +fervent pg31100.txt, pg3200.txt +fervent, pg31100.txt, pg3200.txt +fervent-- pg3200.txt +fervently pg3200.txt +fervently. pg3200.txt +fervently: pg3200.txt +fervid pg3200.txt +fervidly-- pg3200.txt +fervor: pg3200.txt +fery pg100.txt +fess?" pg3200.txt +fester. 
pg100.txt +festival pg100.txt +festival! pg100.txt +festival, pg3200.txt +festival. pg100.txt +festivals pg3200.txt, pg100.txt +festive pg3200.txt +festive. pg3200.txt +festivities pg3200.txt +festivities." pg3200.txt +festivity pg3200.txt +festoons." pg31100.txt +fet pg100.txt +fetch pg31100.txt, pg3200.txt, pg100.txt +fetch'd pg100.txt +fetch't, pg100.txt +fetch? pg100.txt +fetched pg31100.txt, pg3200.txt +fetches pg31100.txt, pg3200.txt +fetches- pg100.txt +fetching pg31100.txt, pg3200.txt +feted pg3200.txt +feted, pg3200.txt +fetid pg3200.txt +fetish, pg3200.txt +fetlock pg3200.txt +fetter'd pg100.txt +fetter: pg100.txt +fettered pg31100.txt, pg3200.txt +fetters pg3200.txt +fetters. pg3200.txt +feu. pg100.txt +feud pg3200.txt +feud." pg3200.txt +feud?" pg3200.txt +feuds, pg3200.txt +fever pg3200.txt, pg100.txt +fever! pg100.txt +fever, pg3200.txt, pg100.txt +fever--its pg31100.txt +fever-heat pg3200.txt +fever. pg3200.txt +fever; pg31100.txt +fever?" pg31100.txt +feverish pg31100.txt, pg3200.txt +feverish, pg31100.txt +feverishly pg3200.txt +feverishly, pg3200.txt +fevers pg3200.txt +fevers, pg3200.txt +few pg31100.txt, pg3200.txt, pg100.txt +few, pg31100.txt, pg3200.txt, pg100.txt +few- pg100.txt +few. pg3200.txt, pg100.txt +few." pg3200.txt +few: pg3200.txt +fewer pg3200.txt +fewer. pg100.txt +fez'd pg3200.txt +fez, pg3200.txt +fezzes, pg3200.txt +fiasco pg3200.txt +fib." pg3200.txt +fickle pg3200.txt +fickle. pg3200.txt, pg100.txt +fickle." pg31100.txt +fickle?" pg3200.txt +fickleness! pg31100.txt +fickleness. pg100.txt +fico pg100.txt +fiction pg3200.txt +fiction! pg3200.txt +fiction, pg3200.txt, pg100.txt +fiction-writer pg3200.txt +fiction. pg3200.txt, pg100.txt +fiction; pg3200.txt +fictions, pg3200.txt +fictions. pg3200.txt +fictitious pg3200.txt +fiddle-bow pg3200.txt +fiddle-bow? pg3200.txt +fiddle. pg3200.txt +fiddler pg100.txt +fiddler, pg3200.txt +fiddlers. pg3200.txt +fiddlestick! pg100.txt +fiddling pg3200.txt +fidele! pg100.txt +fidele, pg100.txt +fidele. pg100.txt +fidelity pg31100.txt, pg3200.txt +fidelity, pg3200.txt +fidelity. pg3200.txt +fidget pg31100.txt +fidgeted pg31100.txt, pg3200.txt +fidgeted, pg3200.txt +fidgeting pg3200.txt +fidgetings pg3200.txt +fidgets, pg31100.txt +fidius'd pg100.txt +fie pg100.txt +fie! pg100.txt +field pg31100.txt, pg3200.txt, pg100.txt +field! pg3200.txt, pg100.txt +field, pg31100.txt, pg3200.txt, pg100.txt +field--and pg3200.txt +field--would pg3200.txt +field-mouse; pg3200.txt +field. pg3200.txt, pg100.txt +field: pg3200.txt, pg100.txt +field; pg3200.txt, pg100.txt +field? pg100.txt +field?" pg3200.txt +fielding, pg3200.txt +fields pg3200.txt, pg100.txt +fields, pg3200.txt, pg100.txt +fields- pg100.txt +fields--" pg3200.txt +fields--sometimes pg3200.txt +fields. pg3200.txt, pg100.txt +fields; pg3200.txt +fiend pg100.txt +fiend! pg100.txt +fiend's. pg100.txt +fiend, pg3200.txt, pg100.txt +fiend. pg100.txt +fiend; pg3200.txt +fiend? pg3200.txt +fiendish pg3200.txt +fiendish. pg3200.txt +fiends pg3200.txt, pg100.txt +fiends! pg100.txt +fiends, pg100.txt +fiends. pg3200.txt, pg100.txt +fierbois. pg3200.txt +fierbois?" pg3200.txt +fierce pg3200.txt, pg100.txt +fierce, pg100.txt +fierce. pg100.txt +fiercely pg3200.txt +fiercely, pg3200.txt +fiercely-- pg3200.txt +fiercely-flourishing pg3200.txt +fiercely. pg3200.txt +fierceness. pg100.txt +fiercer pg3200.txt +fiery pg3200.txt +fiery, pg3200.txt, pg100.txt +fife, pg100.txt +fife. 
pg100.txt +fifes pg3200.txt +fifes, pg100.txt +fifteen pg31100.txt, pg3200.txt, pg100.txt +fifteen, pg31100.txt, pg3200.txt +fifteen--fifteen--fifteen--thirty-four. pg3200.txt +fifteen--in pg3200.txt +fifteen. pg31100.txt, pg3200.txt +fifteen?" pg31100.txt +fifteenth pg3200.txt, pg100.txt +fifth pg3200.txt, pg100.txt +fifth, pg100.txt +fifth. pg3200.txt +fifth? pg100.txt +fiftieth, pg3200.txt +fifty pg31100.txt, pg3200.txt, pg100.txt +fifty!" pg3200.txt +fifty!--"] pg3200.txt +fifty, pg3200.txt +fifty-dollar pg3200.txt +fifty-eight pg3200.txt +fifty-eight, pg3200.txt +fifty-five pg3200.txt +fifty-five--and pg3200.txt +fifty-four pg3200.txt +fifty-inch, pg3200.txt +fifty-one pg3200.txt +fifty-seven pg3200.txt +fifty-six pg3200.txt +fifty-three pg3200.txt +fifty-two pg3200.txt +fifty. pg3200.txt +fifty." pg31100.txt +fifty; pg3200.txt, pg100.txt +fig pg3200.txt, pg100.txt +fig-leaf pg3200.txt +fig-leaf. pg3200.txt +fig-leaves pg100.txt +fig-tree pg3200.txt +fig. pg100.txt +figger." pg3200.txt +fight pg3200.txt, pg100.txt +fight! pg100.txt +fight's pg3200.txt +fight'st. pg100.txt +fight, pg3200.txt, pg100.txt +fight- pg100.txt +fight. pg3200.txt, pg100.txt +fight." pg3200.txt +fight.' pg3200.txt +fight.] pg100.txt +fight; pg3200.txt, pg100.txt +fight? pg100.txt +fight?" pg3200.txt +fight] pg100.txt +fighter pg3200.txt +fighter, pg100.txt +fighters pg3200.txt +fighting pg3200.txt, pg100.txt +fighting), pg3200.txt +fighting, pg3200.txt +fighting-habits pg3200.txt +fighting. pg31100.txt, pg100.txt +fighting; pg100.txt +fights pg3200.txt +fights, pg100.txt +fights; pg100.txt +figs! pg3200.txt +figs, pg3200.txt +figs--we pg3200.txt +figs. pg100.txt +figurative pg3200.txt +figuratively, pg3200.txt +figure pg31100.txt, pg3200.txt, pg100.txt +figure! pg31100.txt, pg100.txt +figure!--and pg31100.txt +figure, pg3200.txt +figure. pg31100.txt, pg3200.txt, pg100.txt +figure." pg31100.txt, pg3200.txt +figure; pg31100.txt, pg3200.txt +figure? pg3200.txt, pg100.txt +figure?" pg31100.txt, pg3200.txt +figure] pg3200.txt +figured pg3200.txt +figures pg31100.txt, pg3200.txt, pg100.txt +figures). pg3200.txt +figures, pg3200.txt, pg100.txt +figures- pg100.txt +figures: pg3200.txt +figuring pg3200.txt +figuring, pg3200.txt +fiji. pg3200.txt +fiji."] pg3200.txt +filbert, pg3200.txt +filched pg3200.txt +filching pg3200.txt +file pg3200.txt, pg100.txt +file, pg3200.txt, pg100.txt +file. pg3200.txt +file; pg3200.txt, pg100.txt +filed pg3200.txt +filed, pg3200.txt +filed. pg100.txt +filename. pg100.txt +files pg3200.txt, pg100.txt +files. pg3200.txt +filing pg3200.txt +filkins pg3200.txt +fill pg31100.txt, pg3200.txt, pg100.txt +fill'd pg100.txt +fill'd, pg100.txt +fill'd; pg100.txt +fill, pg3200.txt, pg100.txt +fill. pg31100.txt, pg3200.txt, pg100.txt +fill; pg3200.txt +filled pg31100.txt, pg3200.txt +filled, pg3200.txt +filled. pg3200.txt, pg100.txt +filled." pg31100.txt +filling pg31100.txt, pg3200.txt, pg100.txt +filling, pg3200.txt +filling. pg3200.txt +fills pg3200.txt, pg100.txt +fills. pg3200.txt +film pg3200.txt +film, pg3200.txt +film; pg100.txt +filmy pg3200.txt +filmy, pg3200.txt +fils, pg3200.txt +filter pg3200.txt +filtering pg3200.txt +filth, pg100.txt +filths pg100.txt +filthy pg3200.txt, pg100.txt +filthy, pg100.txt +filthy; pg100.txt +fin, pg3200.txt +fin. pg100.txt +final pg31100.txt, pg3200.txt +final--she pg3200.txt +final. pg3200.txt +finale pg3200.txt +finale:-- pg31100.txt +finality pg3200.txt +finally pg31100.txt, pg3200.txt +finally, pg3200.txt +finally. 
pg3200.txt +finally: pg3200.txt +finance. pg3200.txt +finances pg31100.txt +financial pg3200.txt +financial, pg3200.txt +financially pg3200.txt +financier pg3200.txt +financier, pg3200.txt +financiering. pg3200.txt +find pg31100.txt, pg3200.txt, pg100.txt +find! pg100.txt +find'st pg100.txt +find, pg31100.txt, pg3200.txt, pg100.txt +find- pg100.txt +find-faults- pg100.txt +find. pg31100.txt, pg3200.txt, pg100.txt +find." pg3200.txt +find: pg3200.txt, pg100.txt +find; pg100.txt +find? pg3200.txt +find?" pg3200.txt +findable pg3200.txt +finder pg100.txt +finding pg31100.txt, pg3200.txt, pg100.txt +finds pg31100.txt, pg3200.txt, pg100.txt +finds, pg100.txt +finds. pg3200.txt, pg100.txt +fine pg31100.txt, pg3200.txt, pg100.txt +fine! pg3200.txt +fine!" pg31100.txt +fine, pg31100.txt, pg3200.txt, pg100.txt +fine--i pg31100.txt +fine. pg3200.txt, pg100.txt +fine." pg31100.txt, pg3200.txt +fine; pg31100.txt, pg3200.txt, pg100.txt +fined. pg3200.txt +finely pg3200.txt +finely, pg3200.txt +finely. pg100.txt +fineness pg3200.txt +finer pg31100.txt, pg3200.txt, pg100.txt +finer), pg100.txt +finer, pg3200.txt +finer. pg31100.txt, pg3200.txt +fineries. pg3200.txt +fineries; pg3200.txt +finery pg31100.txt, pg3200.txt +finery, pg3200.txt +finery. pg31100.txt +fines pg3200.txt +fines; pg100.txt +finessed pg31100.txt +finest pg31100.txt, pg3200.txt +finest-looking pg31100.txt +finger pg31100.txt, pg3200.txt, pg100.txt +finger, pg3200.txt, pg100.txt +finger-biting pg3200.txt +finger-boards. pg3200.txt +finger-end; pg100.txt +finger-ends.] pg3200.txt +finger-nail, pg3200.txt +finger-nails, pg3200.txt +finger-prints pg3200.txt +finger. pg31100.txt, pg3200.txt, pg100.txt +finger: pg3200.txt +finger; pg3200.txt +fingered pg3200.txt +fingering pg3200.txt +fingering, pg100.txt +fingernails pg3200.txt +fingerprints pg3200.txt +fingers pg31100.txt, pg3200.txt, pg100.txt +fingers! pg100.txt +fingers, pg3200.txt, pg100.txt +fingers--appearing pg3200.txt +fingers. pg31100.txt, pg3200.txt, pg100.txt +fingers.) pg3200.txt +fingers: pg3200.txt +fingers; pg3200.txt +fingers] pg100.txt +fingre, pg100.txt +fingres. pg100.txt +fingres; pg100.txt +finis pg31100.txt +finis! pg3200.txt +finis. pg31100.txt +finish pg31100.txt, pg3200.txt, pg100.txt +finish'd, pg100.txt +finish'd. pg100.txt +finish, pg3200.txt +finish--5. pg3200.txt +finish. pg31100.txt, pg3200.txt, pg100.txt +finish." pg3200.txt +finished pg31100.txt, pg3200.txt +finished, pg31100.txt, pg3200.txt +finished. pg31100.txt, pg3200.txt +finished." pg3200.txt +finished.) pg3200.txt +finished: pg3200.txt +finished; pg3200.txt +finisher pg100.txt +finishes pg3200.txt +finishes. pg3200.txt +finishing pg31100.txt, pg3200.txt +finishing. pg3200.txt +finishing; pg3200.txt +finn pg3200.txt +finn!" pg3200.txt +finn'. pg3200.txt +finn, pg3200.txt +finn," pg3200.txt +finn. pg3200.txt +finn." pg3200.txt +finn? pg3200.txt +finsbury. pg100.txt +finsterniss pg3200.txt +fir'd pg100.txt +fire pg31100.txt, pg3200.txt, pg100.txt +fire! pg3200.txt, pg100.txt +fire!" pg3200.txt +fire!--i pg31100.txt +fire's pg100.txt +fire) pg3200.txt +fire, pg31100.txt, pg3200.txt, pg100.txt +fire," pg3200.txt +fire- pg3200.txt +fire-assay! pg3200.txt +fire-assay; pg3200.txt +fire-belching pg3200.txt +fire-hued pg3200.txt +fire-new pg100.txt +fire-proof pg3200.txt +fire-side pg3200.txt +fire. pg31100.txt, pg3200.txt, pg100.txt +fire." pg31100.txt, pg3200.txt +fire: pg3200.txt +fire; pg31100.txt, pg3200.txt, pg100.txt +fire? pg3200.txt, pg100.txt +firebrands. pg100.txt +firecoal. 
pg3200.txt +firecracker, pg3200.txt +fired pg3200.txt +fired, pg3200.txt +fired. pg3200.txt +fireless pg3200.txt +firemen pg3200.txt +firenze, pg3200.txt +fireplace pg31100.txt, pg3200.txt +fireplace, pg31100.txt, pg3200.txt +fireplace. pg31100.txt, pg3200.txt +fires pg3200.txt, pg100.txt +fires! pg100.txt +fires, pg3200.txt, pg100.txt +fires-- pg3200.txt +fires--" pg3200.txt +fires. pg3200.txt, pg100.txt +fires; pg100.txt +fireside pg3200.txt +fireside, pg31100.txt, pg3200.txt +fireside--even pg3200.txt +fireside. pg31100.txt, pg3200.txt +firesides pg3200.txt +firesides, pg3200.txt +fireworks pg3200.txt +fireworks, pg3200.txt +fireworks; pg100.txt +firing pg3200.txt, pg100.txt +firk. pg100.txt +firm pg31100.txt, pg3200.txt, pg100.txt +firm, pg3200.txt, pg100.txt +firm. pg31100.txt, pg3200.txt, pg100.txt +firm.) pg3200.txt +firm.] pg3200.txt +firm; pg3200.txt, pg100.txt +firm? pg3200.txt +firmament pg3200.txt +firmament! pg100.txt +firmament. pg3200.txt, pg100.txt +firmament; pg3200.txt +firmest pg3200.txt +firmly pg3200.txt, pg100.txt +firmly. pg100.txt +firmness pg31100.txt, pg3200.txt +firmness, pg31100.txt +firmness. pg3200.txt +firmness: pg3200.txt +firs; pg31100.txt +first! pg3200.txt, pg100.txt +first!" pg3200.txt +first) pg3200.txt +first, pg31100.txt, pg3200.txt, pg100.txt +first--indeed, pg31100.txt +first--she pg31100.txt +first--when pg3200.txt +first-class pg3200.txt +first-rate pg3200.txt +first-rate. pg3200.txt +first. pg31100.txt, pg3200.txt, pg100.txt +first." pg31100.txt, pg3200.txt +first: pg31100.txt, pg3200.txt +first; pg31100.txt, pg3200.txt, pg100.txt +first? pg31100.txt, pg100.txt +first?" pg3200.txt +first?' pg3200.txt +first]. pg100.txt +firstly, pg31100.txt +firstly. pg3200.txt +fischer, pg3200.txt +fischers. pg3200.txt +fish pg31100.txt, pg3200.txt, pg100.txt +fish, pg3200.txt, pg100.txt +fish-basket; pg3200.txt +fish-belly pg3200.txt +fish-hook." pg3200.txt +fish-hooks pg3200.txt +fish-hooks.' pg3200.txt +fish-interiors!--' pg3200.txt +fish-lines pg3200.txt +fish-pools, pg3200.txt +fish-woman. pg3200.txt +fish. pg100.txt +fish." pg3200.txt +fish: pg3200.txt +fish; pg3200.txt, pg100.txt +fished pg3200.txt +fisher pg3200.txt +fisher, pg3200.txt +fisher. pg3200.txt +fisherman pg3200.txt +fishermen pg3200.txt +fishers pg3200.txt +fishery pg3200.txt +fishes pg3200.txt +fishes," pg3200.txt +fishes. pg100.txt +fishes." pg3200.txt +fishes; pg3200.txt, pg100.txt +fishhooks, pg3200.txt +fishing pg3200.txt +fishing-rod. pg3200.txt +fishing. pg3200.txt +fishmonger. pg100.txt +fishpond pg100.txt +fishwife, pg3200.txt +fishwoman. pg3200.txt +fiske, pg3200.txt +fiske. pg3200.txt +fiske: pg3200.txt +fisnomy pg100.txt +fissure. pg3200.txt +fissures pg3200.txt +fist pg3200.txt +fist, pg3200.txt, pg100.txt +fist--say, pg3200.txt +fist; pg3200.txt +fists pg3200.txt +fit pg31100.txt, pg3200.txt, pg100.txt +fit, pg3200.txt, pg100.txt +fit- pg100.txt +fit--but pg3200.txt +fit. pg3200.txt, pg100.txt +fit; pg100.txt +fitful pg3200.txt +fitfully pg3200.txt +fitly pg100.txt +fitment pg100.txt +fitness pg100.txt +fitness? pg100.txt +fits pg3200.txt, pg100.txt +fits! pg100.txt +fits, pg31100.txt, pg100.txt +fits. pg3200.txt, pg100.txt +fitted pg31100.txt, pg3200.txt, pg100.txt +fitted. pg100.txt +fitten pg3200.txt +fitter pg3200.txt, pg100.txt +fittest. pg100.txt +fitting pg3200.txt, pg100.txt +fitting, pg31100.txt +fitting-up pg31100.txt +fittingly pg3200.txt +fittings pg3200.txt +fitzowen, pg31100.txt +fitzwater pg100.txt +fitzwilliam pg31100.txt +fitzwilliam, pg31100.txt +fitzwilliam. 
pg31100.txt +five pg31100.txt, pg3200.txt, pg100.txt +five! pg3200.txt +five, pg31100.txt, pg3200.txt, pg100.txt +five--and pg3200.txt +five--aye, pg3200.txt +five--were pg3200.txt +five-and- pg3200.txt +five-and-twenty, pg100.txt +five-and-twenty. pg100.txt +five-eights pg3200.txt +five-finger-tied, pg100.txt +five-pound pg3200.txt +five-score. pg100.txt +five. pg3200.txt, pg100.txt +five." pg31100.txt, pg3200.txt +five: pg3200.txt +five; pg100.txt +fives, pg100.txt +fix pg31100.txt, pg3200.txt +fix'd pg100.txt +fix'd, pg100.txt +fix'd. pg100.txt +fix'd; pg100.txt +fix. pg31100.txt +fix." pg3200.txt +fixed pg31100.txt, pg3200.txt +fixed, pg31100.txt, pg3200.txt, pg100.txt +fixed-- pg3200.txt +fixed--but pg31100.txt +fixed. pg31100.txt, pg3200.txt +fixed." pg31100.txt, pg3200.txt +fixed; pg3200.txt +fixed?" pg3200.txt +fixedness pg3200.txt +fixes pg3200.txt, pg100.txt +fixeth pg100.txt +fixing pg31100.txt, pg3200.txt +fixture pg100.txt +fjeld, pg3200.txt +flabby pg3200.txt +flag pg3200.txt, pg100.txt +flag, pg3200.txt, pg100.txt +flag-ship pg3200.txt +flag-staff pg3200.txt +flag. pg3200.txt +flag." pg3200.txt +flag; pg3200.txt +flagon pg100.txt +flagons pg3200.txt +flags pg3200.txt, pg100.txt +flags, pg3200.txt +flags. pg3200.txt +flags; pg3200.txt +flagstaff pg3200.txt +flagstaff. pg3200.txt +flagstones pg3200.txt +flagstones--no pg3200.txt +flail pg3200.txt +flail, pg100.txt +flaked pg3200.txt +flakes pg100.txt +flakes. pg3200.txt +flambeau's! pg31100.txt +flambeaux. pg3200.txt +flame pg3200.txt, pg100.txt +flame, pg3200.txt, pg100.txt +flame. pg3200.txt, pg100.txt +flame; pg100.txt +flamed pg3200.txt +flamefront pg3200.txt +flamen, pg100.txt +flamens pg100.txt +flames pg3200.txt, pg100.txt +flames! pg3200.txt +flames, pg3200.txt +flaming pg3200.txt +flaminius pg100.txt +flaminius? pg100.txt +flanders. pg100.txt +flank--forward--march!" pg3200.txt +flannel pg31100.txt +flannel, pg3200.txt +flannel; pg100.txt +flap-dragon. pg100.txt +flapdoodle pg3200.txt +flapping pg3200.txt +flare pg3200.txt +flared pg3200.txt +flash pg3200.txt, pg100.txt +flash! pg3200.txt +flash, pg3200.txt, pg100.txt +flash. pg3200.txt +flash." pg3200.txt +flash: pg3200.txt +flashed pg3200.txt +flashed; pg3200.txt +flashes pg3200.txt +flashily pg3200.txt +flashing pg3200.txt +flask, pg100.txt +flask. pg100.txt +flat pg31100.txt, pg3200.txt, pg100.txt +flat! pg100.txt +flat, pg3200.txt, pg100.txt +flat-- pg3200.txt +flat-long. pg100.txt +flat-roof) pg3200.txt +flat. pg3200.txt, pg100.txt +flat." pg3200.txt +flat; pg100.txt +flatboatman pg3200.txt +flats pg3200.txt +flats, pg100.txt +flatted pg3200.txt +flatten pg3200.txt +flatter pg31100.txt, pg3200.txt, pg100.txt +flatter'd. pg100.txt +flatter'd? pg100.txt +flatter, pg100.txt +flatter- pg100.txt +flatter. pg3200.txt, pg100.txt +flatter; pg100.txt +flattered pg31100.txt, pg3200.txt +flattered, pg3200.txt +flattered. pg100.txt +flatterer pg100.txt +flatterer, pg100.txt +flatterer. pg100.txt +flatterer." pg31100.txt +flatterers pg100.txt +flatterers! pg100.txt +flatterers, pg100.txt +flatterers; pg100.txt +flatterers? pg100.txt +flatteries pg100.txt +flatteries, pg100.txt +flattering pg31100.txt, pg3200.txt +flattering--but pg31100.txt +flatters pg31100.txt +flattery pg31100.txt, pg100.txt +flattery, pg31100.txt, pg100.txt +flattery? pg100.txt +flatting pg3200.txt +flatulence:" pg3200.txt +flaunt pg3200.txt +flavius pg100.txt +flavius! pg100.txt +flavius. pg100.txt +flavor pg3200.txt +flavor, pg3200.txt +flavor. 
pg3200.txt +flavour, pg31100.txt +flavy, pg3200.txt +flaw pg3200.txt +flaw! pg100.txt +flaw, pg100.txt +flaw. pg100.txt +flaws pg3200.txt, pg100.txt +flax. pg100.txt +flax? pg100.txt +flay'd pg100.txt +flayed, pg3200.txt +flays pg3200.txt +flea pg3200.txt +flea's pg100.txt +flea. pg3200.txt, pg100.txt +flea?" pg3200.txt +fleance. pg100.txt +fleas pg3200.txt +fleas, pg3200.txt +fleas. pg3200.txt +fleas.]--the pg3200.txt +fleas; pg3200.txt +flecked pg3200.txt +fled pg31100.txt, pg3200.txt, pg100.txt +fled, pg3200.txt, pg100.txt +fled. pg3200.txt, pg100.txt +fled; pg3200.txt, pg100.txt +fled? pg100.txt +fledged pg3200.txt +fledged. pg3200.txt +fledglings pg3200.txt +flee pg3200.txt +flee; pg3200.txt +fleece, pg100.txt +fleece. pg100.txt +fleece: pg100.txt +fleecy pg3200.txt +fleeing pg3200.txt +fleeing, pg3200.txt +fleet pg3200.txt, pg100.txt +fleet'st, pg100.txt +fleet, pg3200.txt, pg100.txt +fleet. pg100.txt +fleet; pg100.txt +fleet? pg100.txt +fleetest." pg3200.txt +fleeting pg3200.txt +fleeting. pg3200.txt +fleets pg3200.txt +flehe pg3200.txt +fleissig! pg3200.txt +fleming pg100.txt +flesh pg3200.txt, pg100.txt +flesh! pg3200.txt, pg100.txt +flesh'd pg100.txt +flesh, pg3200.txt, pg100.txt +flesh- pg100.txt +flesh--he pg3200.txt +flesh. pg3200.txt, pg100.txt +flesh." pg3200.txt +flesh.' pg100.txt +flesh; pg100.txt +flesh? pg100.txt +fleshes pg100.txt +fleshly pg3200.txt +flew pg3200.txt +flew, pg3200.txt, pg100.txt +flew--leastways, pg3200.txt +flew. pg3200.txt +flew; pg3200.txt +flexibility pg31100.txt +flexible: pg100.txt +flexure. pg100.txt +flicker pg3200.txt +flickering pg3200.txt +flidge; pg100.txt +fliers, pg100.txt +fliers. pg100.txt +flies pg3200.txt, pg100.txt +flies! pg3200.txt +flies, pg3200.txt, pg100.txt +flies. pg3200.txt, pg100.txt +flies." pg3200.txt +flies; pg3200.txt, pg100.txt +flies? pg100.txt +flieth!" pg3200.txt +flieth. pg100.txt +flight pg31100.txt, pg3200.txt, pg100.txt +flight! pg3200.txt +flight, pg31100.txt, pg3200.txt, pg100.txt +flight- pg100.txt +flight. pg3200.txt, pg100.txt +flight: pg100.txt +flight; pg3200.txt, pg100.txt +flights pg3200.txt +flights. pg3200.txt +flights; pg3200.txt +flimsier pg3200.txt +flimsy pg3200.txt +flinched. pg3200.txt +flinders pg3200.txt +flinders?" pg3200.txt +fling pg31100.txt, pg3200.txt +flinging pg3200.txt +flint pg3200.txt, pg100.txt +flint, pg3200.txt, pg100.txt +flint-lock pg3200.txt +flint-lock: pg3200.txt +flint. pg100.txt +flint.' pg3200.txt +flint; pg100.txt +flinty pg3200.txt +flippantly pg3200.txt +flippantly; pg3200.txt +flirt pg31100.txt +flirt, pg31100.txt +flirt. pg3200.txt +flirtation pg31100.txt +flirtations, pg31100.txt +flit pg3200.txt +flitted pg3200.txt +flitting pg3200.txt +float pg3200.txt +floated pg3200.txt +floated. pg3200.txt +floating pg3200.txt +floating, pg3200.txt +floating. pg3200.txt +floating; pg3200.txt +floats pg3200.txt +flock pg3200.txt, pg100.txt +flock, pg100.txt +flock. pg3200.txt +flock; pg3200.txt, pg100.txt +flocked pg3200.txt +flocking pg3200.txt +flocks pg3200.txt, pg100.txt +flocks. pg3200.txt +flogged, pg31100.txt +flogged. pg3200.txt +flogging, pg3200.txt +flood pg3200.txt, pg100.txt +flood, pg100.txt +flood-time, pg3200.txt +flood. pg3200.txt, pg100.txt +flood; pg100.txt +flood? pg100.txt +flooded pg3200.txt +flooding pg3200.txt +floods pg3200.txt +floods, pg3200.txt, pg100.txt +floods. pg3200.txt +floods; pg100.txt +floor pg31100.txt, pg3200.txt +floor! pg3200.txt +floor!' 
+floor"; pg3200.txt
+floor, pg3200.txt, pg100.txt
+floor,--the pg3200.txt
+floor--" pg3200.txt
+floor. pg3200.txt
+floor." pg3200.txt
+floor; pg3200.txt, pg100.txt
+floors pg3200.txt
+floors, pg31100.txt, pg3200.txt
+flourish pg31100.txt, pg3200.txt, pg100.txt
[... several thousand further index records, one token per line in the same format, covering the tokens "flourish!" through "glimmer", omitted ...]
+glimmer, pg3200.txt
pg3200.txt +glimpse pg31100.txt, pg3200.txt +glimpsed pg3200.txt +glimpses pg3200.txt +glimpses, pg3200.txt +glinted pg3200.txt +glinting pg3200.txt +glisters pg100.txt +glitter, pg3200.txt +glittering pg3200.txt +glitters pg3200.txt +glitters'; pg3200.txt +gloaming pg3200.txt +gloat pg3200.txt +gloated pg3200.txt +gloating. pg3200.txt +globe pg3200.txt, pg100.txt +globe, pg3200.txt, pg100.txt +globe--all pg3200.txt +globe. pg3200.txt +globe; pg100.txt +globe? pg3200.txt +globes. pg100.txt +gloom pg31100.txt, pg3200.txt +gloom, pg3200.txt +gloom. pg31100.txt, pg3200.txt +gloom." pg3200.txt +gloom; pg3200.txt +gloomily. pg31100.txt +glooming, pg3200.txt +gloomy pg3200.txt +gloomy, pg3200.txt +gloomy. pg3200.txt +gloried pg31100.txt, pg3200.txt +glories pg3200.txt, pg100.txt +glories! pg100.txt +glories, pg100.txt +glories. pg3200.txt +glorified pg3200.txt, pg100.txt +glorified. pg3200.txt +glorifies pg3200.txt +glorify pg3200.txt +glorify, pg100.txt +glorifying pg3200.txt +glorious pg3200.txt +glorious. pg100.txt +gloriously pg3200.txt, pg100.txt +glory pg31100.txt, pg3200.txt, pg100.txt +glory! pg3200.txt, pg100.txt +glory!" pg3200.txt +glory, pg3200.txt, pg100.txt +glory- pg100.txt +glory. pg3200.txt, pg100.txt +glory." pg3200.txt +glory.' pg3200.txt +glory: pg100.txt +glory; pg100.txt +glorying pg3200.txt +glose; pg100.txt +gloss pg100.txt +gloss, pg100.txt +gloss; pg100.txt +glossary: pg3200.txt +glossop?" pg3200.txt +glossy pg3200.txt +gloucester pg100.txt +gloucester! pg100.txt +gloucester, pg100.txt +gloucester,' pg100.txt +gloucester- pg100.txt +gloucester. pg100.txt +gloucester: pg100.txt +gloucester; pg100.txt +gloucester? pg100.txt +gloucester] pg100.txt +gloucestershire pg100.txt +gloucestershire, pg100.txt +gloucestershire. pg31100.txt, pg100.txt +gloucestershire; pg100.txt +glove pg3200.txt, pg100.txt +glove, pg100.txt +glove. pg3200.txt, pg100.txt +glove." pg3200.txt +glove; pg100.txt +glove? pg100.txt +gloved pg3200.txt +gloves pg3200.txt +gloves, pg31100.txt, pg3200.txt, pg100.txt +gloves. pg3200.txt, pg100.txt +gloves." pg31100.txt, pg3200.txt +gloving pg3200.txt +glow pg31100.txt, pg3200.txt +glow, pg3200.txt, pg100.txt +glow--"to pg31100.txt +glow-worm pg3200.txt +glow. pg3200.txt +glow; pg100.txt +glowed pg3200.txt +glowed, pg100.txt +glowed. pg3200.txt +glowed; pg31100.txt +glowing pg3200.txt +glowing, pg3200.txt +gloze pg100.txt +glue, pg3200.txt +glum pg3200.txt +glum, pg3200.txt +glycerin. pg3200.txt +glyn pg3200.txt +gnadigsten pg3200.txt +gnarled pg3200.txt +gnat! pg100.txt +gnat, pg100.txt +gnat. pg3200.txt +gnats, pg3200.txt +gnaw pg3200.txt +gnawing pg3200.txt +gnawing. pg3200.txt +gnillic, pg3200.txt +go! pg3200.txt, pg100.txt +go!" pg3200.txt +go!' pg3200.txt +go'- pg100.txt +go'st, pg100.txt +go, pg31100.txt, pg3200.txt, pg100.txt +go- pg100.txt +go-- pg3200.txt +go--and pg3200.txt +go--but pg3200.txt +go--friends pg3200.txt +go--i pg31100.txt +go--in pg3200.txt +go--leave pg3200.txt +go--presently.' pg3200.txt +go--there pg3200.txt +go--we pg3200.txt +go. pg31100.txt, pg3200.txt, pg100.txt +go." pg31100.txt, pg3200.txt +go.' pg3200.txt +go.--you pg31100.txt +go... pg3200.txt +go: pg3200.txt, pg100.txt +go; pg31100.txt, pg3200.txt, pg100.txt +go? pg3200.txt, pg100.txt +go?" pg31100.txt, pg3200.txt +go?' pg3200.txt +goa; pg3200.txt +goad pg3200.txt, pg100.txt +goad, pg3200.txt +goads, pg3200.txt +goal: pg100.txt +goat pg100.txt +goat! 
pg100.txt +goat, pg100.txt +goat-skins pg3200.txt +goatee pg3200.txt +goatees-- pg3200.txt +goats pg3200.txt +goats, pg100.txt +goats. pg100.txt +gobbler-of-the- pg3200.txt +gobbo pg100.txt +gobbo' pg100.txt +gobbo, pg100.txt +gobelins. pg3200.txt +goblet, pg3200.txt +goblets pg3200.txt +goblets, pg3200.txt +goblins. pg100.txt +god pg3200.txt, pg100.txt +god! pg31100.txt, pg3200.txt, pg100.txt +god!" pg3200.txt +god!--" pg3200.txt +god'll pg3200.txt +god's pg3200.txt, pg100.txt +god, pg31100.txt, pg3200.txt, pg100.txt +god- pg3200.txt, pg100.txt +god-a-mercy. pg100.txt +god-bless-our-home pg3200.txt +god-fathers; pg100.txt +god-given pg3200.txt +god-heads pg100.txt +god-i pg100.txt +god-i-god-en! pg100.txt +god. pg3200.txt, pg100.txt +god." pg3200.txt +god.' pg3200.txt +god: pg100.txt +god; pg3200.txt, pg100.txt +god? pg3200.txt, pg100.txt +god?" pg3200.txt +godbedamned--culled pg3200.txt +goddard pg31100.txt +goddard's pg31100.txt +goddard's." pg31100.txt +godden. pg100.txt +goddess pg3200.txt, pg100.txt +goddess, pg3200.txt, pg100.txt +goddess. pg3200.txt +goddess; pg100.txt +goddesses! pg100.txt +goddesses, pg100.txt +goddesses; pg100.txt +gode pg3200.txt +godey's pg3200.txt +godfather, pg100.txt +godfathers. pg100.txt +godfrey pg3200.txt +godfrey's pg3200.txt +godless pg3200.txt +godliness. pg100.txt +godliness.'" pg3200.txt +gods pg3200.txt, pg100.txt +gods! pg100.txt +gods, pg3200.txt, pg100.txt +gods- pg100.txt +gods. pg3200.txt, pg100.txt +gods.' pg100.txt +gods: pg100.txt +gods; pg3200.txt, pg100.txt +gods? pg100.txt +godship pg3200.txt +godson, pg31100.txt +godspeed. pg3200.txt +godwin pg3200.txt +godwin. pg3200.txt +goelwa pg3200.txt +goes pg31100.txt, pg3200.txt, pg100.txt +goes! pg3200.txt, pg100.txt +goes!' pg3200.txt, pg100.txt +goes, pg31100.txt, pg3200.txt, pg100.txt +goes. pg3200.txt, pg100.txt +goes." pg31100.txt +goes: pg100.txt +goes; pg3200.txt, pg100.txt +goest, pg100.txt +goest; pg100.txt +goest? pg100.txt +goeth pg3200.txt +goeth, pg3200.txt +goettingen, pg3200.txt +goffe pg100.txt +goffe; pg100.txt +goggles pg3200.txt +goggles, pg3200.txt +goin' pg3200.txt +going pg31100.txt, pg3200.txt, pg100.txt +going!" pg3200.txt +going, pg31100.txt, pg3200.txt, pg100.txt +going--" pg3200.txt +going--leave pg3200.txt +going--one--" pg3200.txt +going--one--two--" pg3200.txt +going--the pg3200.txt +going. pg31100.txt, pg3200.txt, pg100.txt +going." pg31100.txt +going.' pg3200.txt +going; pg31100.txt, pg100.txt +going? pg100.txt +going?" pg31100.txt, pg3200.txt +going] pg100.txt +goings pg31100.txt, pg3200.txt +goings-on, pg3200.txt +golconda pg3200.txt +golconda. pg3200.txt +gold pg3200.txt, pg100.txt +gold! pg3200.txt, pg100.txt +gold!" pg3200.txt +gold, pg31100.txt, pg3200.txt, pg100.txt +gold--they pg3200.txt +gold-dust. pg3200.txt +gold-lace pg3200.txt +gold-mine pg3200.txt +gold-strike pg3200.txt +gold. pg3200.txt, pg100.txt +gold." pg3200.txt +gold.' pg100.txt +gold: pg3200.txt, pg100.txt +gold; pg31100.txt, pg3200.txt, pg100.txt +gold? pg100.txt +gold] pg100.txt +golden pg3200.txt, pg100.txt +golden, pg3200.txt +golden. pg3200.txt +goldfish; pg3200.txt +goldsmith pg100.txt +goldsmith's pg3200.txt +golgotha, pg100.txt +goliah!" pg3200.txt +goliah, pg3200.txt +goliases pg100.txt +goliath, pg3200.txt +goliath. pg3200.txt +goliath? pg3200.txt +gomorrah pg3200.txt +gomorrah!" pg3200.txt +gomorrah. pg3200.txt +gondola pg3200.txt +gondola, pg3200.txt +gondola. 
pg3200.txt +gondolas pg3200.txt +gondolas, pg3200.txt +gondolier, pg100.txt +gondolier; pg3200.txt +gondour pg3200.txt +gone pg31100.txt, pg3200.txt, pg100.txt +gone! pg3200.txt, pg100.txt +gone!" pg31100.txt, pg3200.txt +gone" pg3200.txt +gone, pg31100.txt, pg3200.txt, pg100.txt +gone- pg100.txt +gone--" pg3200.txt +gone--alas, pg3200.txt +gone--and pg3200.txt +gone--lost pg3200.txt +gone--oh, pg3200.txt +gone. pg31100.txt, pg3200.txt, pg100.txt +gone." pg31100.txt, pg3200.txt +gone.'" pg3200.txt +gone.... pg3200.txt +gone; pg31100.txt, pg3200.txt, pg100.txt +gone? pg3200.txt, pg100.txt +gone?" pg31100.txt, pg3200.txt +gone?' pg3200.txt +goner!" pg3200.txt +goner. pg3200.txt +goneril! pg100.txt +goneril, pg100.txt +goneril. pg100.txt +goneril; pg100.txt +goneril? pg100.txt +goners! pg3200.txt +goners, pg3200.txt +gong, pg3200.txt +gonzago'? pg100.txt +gonzago. pg100.txt +gonzalo pg100.txt +gonzalo! pg100.txt +gonzalo'; pg100.txt +gonzalo, pg100.txt +gonzalo. pg100.txt +gonzalo? pg100.txt +goo-gooing pg3200.txt +good! pg31100.txt, pg3200.txt, pg100.txt +good!" pg31100.txt, pg3200.txt +good). pg3200.txt +good, pg31100.txt, pg3200.txt, pg100.txt +good," pg31100.txt +good- pg100.txt +good-- pg3200.txt +good--"considering." pg3200.txt +good--hautboy pg31100.txt +good--i pg3200.txt +good--there pg31100.txt +good-breeding pg31100.txt +good-breeding. pg31100.txt +good-by pg3200.txt +good-by! pg3200.txt +good-by, pg3200.txt +good-by. pg3200.txt +good-by: pg3200.txt +good-by; pg3200.txt +good-bye pg3200.txt +good-bye, pg3200.txt +good-bye. pg31100.txt, pg3200.txt +good-bye." pg31100.txt, pg3200.txt +good-bye?" pg3200.txt +good-conceited pg100.txt +good-day." pg3200.txt +good-den? pg100.txt +good-fellowship, pg3200.txt +good-fellowship- pg100.txt +good-fellowship--a pg3200.txt +good-good, pg3200.txt +good-hearted pg3200.txt +good-hearted, pg3200.txt +good-hearted. pg3200.txt +good-humored pg3200.txt +good-humour, pg31100.txt +good-humour. pg31100.txt +good-humour; pg31100.txt +good-humoured pg31100.txt +good-humoured; pg31100.txt +good-looking pg31100.txt +good-nature pg31100.txt +good-nature. pg31100.txt, pg3200.txt +good-natured pg31100.txt +good-natured, pg31100.txt, pg3200.txt +good-natured. pg3200.txt +good-natured; pg3200.txt +good-naturedly pg3200.txt +good-naturedly, pg3200.txt +good-night, pg3200.txt +good-night--custom pg3200.txt +good-night." pg3200.txt +good-will pg31100.txt +good-will, pg31100.txt, pg3200.txt +good-will. pg31100.txt +good-will? pg100.txt +good-year! pg100.txt +good. pg31100.txt, pg3200.txt, pg100.txt +good." pg31100.txt, pg3200.txt +good: pg100.txt +good; pg31100.txt, pg3200.txt, pg100.txt +good? pg3200.txt, pg100.txt +good?" pg31100.txt, pg3200.txt +good?' pg3200.txt +good] pg3200.txt +good_, pg31100.txt +goodby. pg3200.txt +goodbye pg3200.txt +goodbye, pg3200.txt +goodbye." pg3200.txt +goodfellow pg100.txt +goodfellow. pg100.txt +goodhearted, pg3200.txt +goodly pg3200.txt, pg100.txt +goodman pg3200.txt +goodman, pg3200.txt +goodman. pg3200.txt, pg100.txt +goodman." pg3200.txt +goodman: pg3200.txt +goodness pg31100.txt, pg3200.txt, pg100.txt +goodness!" pg3200.txt +goodness, pg31100.txt, pg100.txt +goodness- pg100.txt +goodness. pg31100.txt, pg100.txt +goodness." pg31100.txt, pg3200.txt +goodness; pg100.txt +goodnight. pg100.txt +goods pg3200.txt, pg100.txt +goods, pg100.txt +goods. pg3200.txt, pg100.txt +goods." pg3200.txt +goods; pg3200.txt, pg100.txt +goodson. pg3200.txt +goodson." pg3200.txt +goodwife. pg3200.txt +goodwife." 
pg3200.txt +goodwill pg31100.txt +goodwill, pg31100.txt +goodwill. pg31100.txt +goodwill." pg31100.txt +goodwins pg100.txt +googling pg3200.txt +goomeroo pg3200.txt +goondiwindi pg3200.txt +goose pg3200.txt +goose! pg100.txt +goose's pg3200.txt +goose, pg3200.txt +goose-pen, pg100.txt +goose-quill pg3200.txt +goose. pg100.txt +goose? pg100.txt +gooseberry pg3200.txt +gooseberry. pg3200.txt +goosequills pg100.txt +gophers; pg3200.txt +gor'd. pg100.txt +gordon pg3200.txt +gordon, pg3200.txt +gordon: pg3200.txt +gore, pg100.txt +gore. pg100.txt +gore; pg100.txt +gorge pg3200.txt +gorge!' pg100.txt +gorge, pg3200.txt +gorge. pg3200.txt, pg100.txt +gorge; pg3200.txt +gorgeous pg3200.txt +gorgeous, pg100.txt +gorgeousness, pg3200.txt +gorgeousnesses, pg3200.txt +gorges, pg3200.txt +gorget, pg100.txt +gorgon, pg100.txt +gorilla, pg3200.txt +gorillas pg3200.txt +gorillas. pg3200.txt +gorillas." pg3200.txt +gorky. pg3200.txt +gormandize pg100.txt +gorner pg3200.txt +gorse pg3200.txt +gory, pg3200.txt +goshen's pg3200.txt +goshen. pg3200.txt +goshen." pg3200.txt +goshen?" pg3200.txt +goshoots pg3200.txt +gospel pg3200.txt +gospel'd, pg100.txt +gospel, pg3200.txt +gospel. pg3200.txt +gospel: pg3200.txt +gospel? pg3200.txt +gospels pg3200.txt +gospels, pg3200.txt +gospels. pg3200.txt +gossamer pg100.txt +gossip pg3200.txt +gossip, pg3200.txt +gossip-- pg3200.txt +gossip. pg31100.txt, pg3200.txt +gossiped pg3200.txt +gossiping pg31100.txt +gossiping. pg100.txt +gossiping? pg100.txt +gossips pg3200.txt +got! pg3200.txt +got!" pg3200.txt +got'st pg3200.txt +got, pg31100.txt, pg3200.txt, pg100.txt +got. pg3200.txt, pg100.txt +got." pg31100.txt, pg3200.txt +got.' pg3200.txt +got; pg3200.txt, pg100.txt +got? pg100.txt +got?" pg3200.txt +got?' pg3200.txt +goth, pg100.txt +goth? pg100.txt +goths pg100.txt +goths, pg3200.txt, pg100.txt +goths- pg100.txt +goths. pg100.txt +goths; pg100.txt +goths? pg100.txt +gott" pg3200.txt +gott, pg3200.txt +gotten pg3200.txt +gotten? pg100.txt +gotteswillen?" pg3200.txt +gottfried pg3200.txt +gouge pg3200.txt +gouged pg3200.txt +gouged, pg3200.txt +gould pg3200.txt +gourd pg3200.txt +gourd, pg3200.txt +gourmandise!" pg3200.txt +gout pg31100.txt, pg3200.txt, pg100.txt +gout, pg3200.txt +gouts pg3200.txt +gouty pg31100.txt +gouty." pg31100.txt +gov. pg3200.txt +govern pg31100.txt, pg3200.txt, pg100.txt +govern'd pg100.txt +govern'd, pg100.txt +govern, pg31100.txt +govern. pg100.txt +govern? pg100.txt +govern?" pg3200.txt +governable. pg3200.txt +governance? pg100.txt +governed pg31100.txt, pg3200.txt +governed. pg31100.txt +governess pg3200.txt +governess, pg31100.txt +governess." pg31100.txt +governing pg3200.txt +government pg31100.txt, pg3200.txt +government's pg3200.txt +government, pg3200.txt, pg100.txt +government--a pg3200.txt +government--now pg3200.txt +government--which pg3200.txt +government. pg3200.txt, pg100.txt +government." pg3200.txt +government..... pg3200.txt +government: pg3200.txt +government; pg3200.txt +government? pg3200.txt +governments pg3200.txt +governor pg3200.txt, pg100.txt +governor! pg100.txt +governor's pg3200.txt +governor, pg3200.txt, pg100.txt +governor--" pg3200.txt +governor-general pg3200.txt +governor-general, pg3200.txt +governor. pg3200.txt, pg100.txt +governor." pg3200.txt +governor; pg100.txt +governors pg3200.txt +governors, pg3200.txt +governs pg3200.txt +govment pg3200.txt +gowen)-- pg3200.txt +gower pg100.txt +gower. pg100.txt +gower: pg100.txt +gower? 
pg100.txt +gown pg31100.txt, pg3200.txt, pg100.txt +gown'- pg100.txt +gown, pg31100.txt, pg3200.txt, pg100.txt +gown--" pg31100.txt +gown--a pg3200.txt +gown--from pg3200.txt +gown. pg31100.txt, pg3200.txt, pg100.txt +gown." pg31100.txt +gown: pg100.txt +gown; pg31100.txt, pg100.txt +gown?" pg31100.txt +gowns pg31100.txt +gowns, pg3200.txt, pg100.txt +gowns. pg31100.txt +grab pg3200.txt +grab' pg3200.txt +grab--if pg3200.txt +grabbed pg3200.txt +grabbing pg3200.txt +grabed pg3200.txt +grabs pg3200.txt +grac'd pg100.txt +grac'd, pg100.txt +grace pg31100.txt, pg3200.txt, pg100.txt +grace! pg31100.txt, pg3200.txt, pg100.txt +grace's pg100.txt +grace, pg31100.txt, pg3200.txt, pg100.txt +grace- pg100.txt +grace--offered pg3200.txt +grace. pg3200.txt, pg100.txt +grace." pg3200.txt +grace.* pg3200.txt +grace: pg100.txt +grace; pg3200.txt, pg100.txt +grace? pg100.txt +grace?" pg3200.txt +gracechurch pg31100.txt +graced pg3200.txt +graceful pg31100.txt, pg3200.txt +graceful!" pg3200.txt +graceful, pg3200.txt +graceful. pg31100.txt +graceful; pg3200.txt +gracefully pg31100.txt, pg3200.txt +gracefully, pg31100.txt, pg3200.txt +gracefulness, pg3200.txt +graces pg31100.txt, pg3200.txt, pg100.txt +graces! pg31100.txt +graces, pg3200.txt, pg100.txt +graces. pg31100.txt, pg3200.txt, pg100.txt +graces; pg100.txt +gracia, pg3200.txt +gracia.) pg3200.txt +gracing pg3200.txt +gracious pg31100.txt, pg3200.txt +gracious! pg31100.txt, pg3200.txt +gracious, pg3200.txt, pg100.txt +gracious. pg100.txt +graciously. pg31100.txt +gradation pg31100.txt +gradations pg31100.txt +grade pg3200.txt +grade, pg3200.txt +grade. pg3200.txt +graded pg3200.txt +grades pg3200.txt +grades, pg3200.txt +grades. pg3200.txt +gradual pg31100.txt, pg3200.txt +gradually pg31100.txt, pg3200.txt +gradually, pg3200.txt +gradually--' pg3200.txt +gradually. pg3200.txt +gradually; pg3200.txt +graduated pg3200.txt +graduated--in pg3200.txt +graff pg100.txt +graft pg3200.txt +grafted pg100.txt +grafters? pg100.txt +graham pg31100.txt +graham, pg31100.txt +graham." pg31100.txt +grail pg3200.txt +grailing, pg3200.txt +grain pg3200.txt, pg100.txt +grain, pg3200.txt, pg100.txt +grain. pg3200.txt +graining pg3200.txt +grains pg3200.txt, pg100.txt +grains: pg100.txt +gramercy. pg100.txt +grammar pg3200.txt +grammar, pg3200.txt +grammar. pg3200.txt +grammar." pg31100.txt +grammar; pg3200.txt +gran' pg3200.txt +granaries pg3200.txt +granary pg3200.txt +grand pg31100.txt, pg3200.txt +grand! pg3200.txt +grand!" pg3200.txt +grand, pg3200.txt +grand-children pg31100.txt +grand-children." pg31100.txt +grand-children; pg31100.txt +grand-daughters. pg3200.txt +grand-ducal pg3200.txt +grand-father pg31100.txt +grand. pg3200.txt, pg100.txt +grand; pg100.txt +grandam pg100.txt +grandam! pg100.txt +grandam; pg100.txt +grandam? pg100.txt +grandaughter pg31100.txt +grandchildren pg3200.txt +grandchildren, pg3200.txt +granddaughter, pg3200.txt +grandee pg3200.txt +grandees pg3200.txt +grandees. pg3200.txt +grander pg3200.txt +grandest pg3200.txt +grandeur pg31100.txt, pg3200.txt +grandeur!" pg3200.txt +grandeur, pg3200.txt +grandeur. pg3200.txt +grandeur; pg3200.txt +grandeurs pg3200.txt +grandeurs, pg3200.txt +grandfather pg31100.txt, pg3200.txt, pg100.txt +grandfather! pg100.txt +grandfather's pg3200.txt +grandfather's. pg100.txt +grandfather, pg3200.txt +grandfather. pg3200.txt, pg100.txt +grandfather?" pg3200.txt +grandfathers pg3200.txt +grandiloquent pg3200.txt +grandly pg3200.txt +grandly, pg3200.txt +grandmama pg31100.txt +grandmama's." 
pg31100.txt +grandmother pg31100.txt, pg3200.txt +grandmother!' pg3200.txt +grandmother, pg31100.txt, pg100.txt +grandmother. pg3200.txt, pg100.txt +grandmother." pg3200.txt +grandmother: pg100.txt +grandmothers, pg31100.txt +grandpapa, pg31100.txt +grandpre pg100.txt +grandpre. pg100.txt +grands pg3200.txt +grandsire pg100.txt +grandsire, pg100.txt +grandsire. pg100.txt +grandson pg31100.txt +grandson; pg3200.txt +grange. pg100.txt +grangerfords pg3200.txt +grangerfords, pg3200.txt +granite pg3200.txt +granite-bound pg3200.txt +grant pg31100.txt, pg3200.txt, pg100.txt +grant's pg31100.txt, pg3200.txt +grant's) pg3200.txt +grant, pg31100.txt, pg3200.txt, pg100.txt +grant--and, pg31100.txt +grant. pg31100.txt, pg3200.txt +grant." pg31100.txt, pg3200.txt +grant: pg3200.txt +grant; pg3200.txt, pg100.txt +grant?" pg3200.txt +granted pg31100.txt, pg3200.txt, pg100.txt +granted, pg3200.txt, pg100.txt +granted. pg3200.txt, pg100.txt +granted; pg3200.txt, pg100.txt +granted? pg3200.txt, pg100.txt +granting pg3200.txt +granting, pg100.txt +grantley's." pg31100.txt +grants. pg31100.txt, pg3200.txt, pg100.txt +grape pg3200.txt, pg100.txt +grape-vines, pg3200.txt +grape-vines. pg3200.txt +grape. pg3200.txt +grapes pg3200.txt +grapes!" pg3200.txt +grapes, pg100.txt +grapes. pg3200.txt, pg100.txt +grapevine, pg3200.txt +grapple pg100.txt +grasp pg3200.txt, pg100.txt +grasp! pg3200.txt +grasp'd pg100.txt +grasp, pg3200.txt +grasp. pg3200.txt +grasp: pg3200.txt +grasping pg3200.txt +grass pg3200.txt, pg100.txt +grass, pg3200.txt, pg100.txt +grass," pg3200.txt +grass-blades pg3200.txt +grass-plot pg3200.txt +grass. pg3200.txt, pg100.txt +grasshopper pg3200.txt +grasshoppers, pg3200.txt +grasshoppers; pg100.txt +grassy pg3200.txt +grat, pg3200.txt +grat--" pg3200.txt +grat] pg3200.txt +grate pg31100.txt, pg3200.txt, pg100.txt +grate, pg100.txt +grated pg3200.txt, pg100.txt +grateful pg31100.txt, pg3200.txt, pg100.txt +grateful, pg31100.txt, pg3200.txt +grateful--mrs. pg3200.txt +grateful. pg31100.txt, pg3200.txt, pg100.txt +grateful; pg3200.txt +grateful? pg3200.txt +gratefullest pg3200.txt +gratefully pg31100.txt, pg3200.txt +gratefully, pg3200.txt +gratefully-- pg3200.txt +gratefully. pg31100.txt, pg3200.txt +gratefully." pg3200.txt +gratefully.--but pg31100.txt +gratefulness pg3200.txt +grates pg3200.txt +gratiano pg100.txt +gratiano! pg100.txt +gratiano, pg100.txt +gratiano- pg100.txt +gratiano. pg100.txt +gratiano: pg100.txt +gratification pg31100.txt, pg3200.txt +gratification, pg3200.txt +gratification. pg31100.txt, pg3200.txt +gratification." pg31100.txt, pg3200.txt +gratification: pg3200.txt +gratification; pg3200.txt +gratifications pg31100.txt +gratified pg31100.txt, pg3200.txt +gratified, pg31100.txt +gratified--and pg31100.txt +gratified--i pg3200.txt +gratified. pg31100.txt, pg3200.txt +gratify pg31100.txt, pg3200.txt, pg100.txt +gratifying pg31100.txt, pg3200.txt +gratifying, pg31100.txt +gratifyingly pg3200.txt +grating pg3200.txt +grating, pg3200.txt +gratings pg3200.txt +gratis. pg3200.txt, pg100.txt +gratitude pg31100.txt, pg3200.txt, pg100.txt +gratitude! pg3200.txt +gratitude!" pg3200.txt +gratitude, pg31100.txt, pg3200.txt +gratitude--artless, pg31100.txt +gratitude--mr. pg3200.txt +gratitude. pg31100.txt, pg3200.txt, pg100.txt +gratitude." pg31100.txt +gratitude: pg3200.txt +gratitude; pg31100.txt +gratuitous pg3200.txt +gratuity pg3200.txt +gratulate. pg100.txt +grave pg31100.txt, pg3200.txt, pg100.txt +grave! pg3200.txt, pg100.txt +grave!" 
pg3200.txt +grave, pg31100.txt, pg3200.txt, pg100.txt +grave- pg100.txt +grave--tenderly, pg3200.txt +grave-maker? pg100.txt +grave-makers. pg100.txt +grave-making? pg100.txt +grave. pg31100.txt, pg3200.txt, pg100.txt +grave." pg3200.txt +grave.] pg100.txt +grave: pg3200.txt, pg100.txt +grave; pg3200.txt, pg100.txt +grave? pg100.txt +grave?" pg3200.txt +gravediggers. pg100.txt +gravel pg31100.txt, pg3200.txt, pg100.txt +gravel-i pg100.txt +gravel. pg3200.txt +gravel; pg31100.txt +gravels pg3200.txt +gravely pg31100.txt, pg3200.txt +gravely, pg3200.txt +gravely--"i pg31100.txt +gravely--giving pg3200.txt +gravely: pg3200.txt +graven pg3200.txt +graver pg3200.txt, pg100.txt +graver. pg3200.txt +graves pg3200.txt, pg100.txt +graves) pg100.txt +graves, pg3200.txt, pg100.txt +graves- pg100.txt +graves. pg100.txt +graves; pg3200.txt +gravest pg3200.txt +gravestone, pg3200.txt +gravestone--and pg3200.txt +graveyard pg3200.txt +graveyard!" pg3200.txt +graveyard, pg3200.txt +graveyard. pg3200.txt +graveyard." pg3200.txt +graveyard; pg3200.txt +graveyard?" pg3200.txt +graveyards pg3200.txt +graveyards, pg3200.txt +graveyards. pg3200.txt +gravitation pg3200.txt +gravitation. pg3200.txt +gravitation?" pg3200.txt +gravities pg3200.txt +gravities, pg100.txt +gravity pg31100.txt, pg3200.txt, pg100.txt +gravity, pg31100.txt, pg3200.txt, pg100.txt +gravity. pg31100.txt, pg3200.txt, pg100.txt +gravity; pg100.txt +gravy, pg100.txt +gravy,) pg3200.txt +gray pg3200.txt +gray's pg31100.txt +gray, pg31100.txt, pg3200.txt +gray-haired pg3200.txt +gray-headed pg3200.txt +gray. pg3200.txt, pg100.txt +gray.'" pg3200.txt +gray: pg3200.txt +grayer pg3200.txt +graying pg3200.txt +graymalkin. pg100.txt +graze. pg100.txt +grazing pg100.txt +grease pg3200.txt +grease, pg3200.txt, pg100.txt +grease-biscuit, pg3200.txt +grease. pg100.txt +grease?" pg3200.txt +greasiest pg3200.txt +greasy pg3200.txt, pg100.txt +greasy. pg100.txt +great! pg31100.txt, pg3200.txt, pg100.txt +great'st pg100.txt +great, pg31100.txt, pg3200.txt, pg100.txt +great,'- pg100.txt +great- pg100.txt +great--this pg3200.txt +great-grandchildren. pg3200.txt +great-grandfather pg3200.txt, pg100.txt +great-grandfather. pg100.txt +great-grandmother pg3200.txt +great-great-gran'mother, pg3200.txt +great-great-grandfather pg3200.txt +great-great-grandfathers pg3200.txt +great-great-great-grandfathers pg3200.txt +great-hearted pg3200.txt +great-uncle pg31100.txt +great. pg31100.txt, pg3200.txt, pg100.txt +great." pg31100.txt +great: pg3200.txt, pg100.txt +great; pg100.txt +greatcoat pg31100.txt +greatcoats. pg31100.txt +greater pg31100.txt, pg3200.txt, pg100.txt +greater, pg31100.txt, pg100.txt +greater. pg31100.txt, pg3200.txt, pg100.txt +greater..... pg3200.txt +greater? pg100.txt +greatest pg31100.txt, pg3200.txt +greatest, pg31100.txt +greatest. pg3200.txt +greatly pg31100.txt, pg3200.txt, pg100.txt +greatly, pg3200.txt +greatly. pg3200.txt +greatness pg31100.txt, pg3200.txt, pg100.txt +greatness! pg100.txt +greatness, pg100.txt +greatness,'- pg100.txt +greatness- pg100.txt +greatness--unconsciousness pg3200.txt +greatness. pg3200.txt +greatnesses pg3200.txt +grecian pg3200.txt +grecians? pg100.txt +greece pg100.txt +greece, pg3200.txt, pg100.txt +greece- pg100.txt +greece. pg3200.txt, pg100.txt +greece." pg3200.txt +greek pg3200.txt, pg100.txt +greek! pg100.txt +greek, pg100.txt +greek. pg31100.txt, pg3200.txt, pg100.txt +greek?" pg3200.txt +greeks pg100.txt +greeks! 
pg100.txt +greeks, pg3200.txt, pg100.txt +greeks: pg100.txt +greeks; pg100.txt +greeley pg3200.txt +greeley, pg3200.txt +greeley----" pg3200.txt +greeley.' pg3200.txt +green pg3200.txt, pg100.txt +green! pg100.txt +green, pg3200.txt, pg100.txt +green--they pg3200.txt +green-a pg100.txt +green-cloth pg3200.txt +green-patch!" pg3200.txt +green-spectacled, pg3200.txt +green. pg3200.txt, pg100.txt +green." pg3200.txt +green; pg100.txt +green? pg100.txt +greenbacks pg3200.txt +greenbacks. pg3200.txt +greenbacks? pg3200.txt +greener pg3200.txt +greenest pg3200.txt +greening pg3200.txt +greenland pg3200.txt +greenland's pg3200.txt +greenleaf pg3200.txt +greenly pg100.txt +greenwich, pg100.txt +greer pg3200.txt +greet pg3200.txt, pg100.txt +greet, pg100.txt +greet. pg100.txt +greet: pg100.txt +greeted pg31100.txt, pg3200.txt +greeting pg3200.txt, pg100.txt +greeting, pg3200.txt, pg100.txt +greeting. pg100.txt +greeting.' pg100.txt +greetings pg3200.txt, pg100.txt +greetings, pg100.txt +greetings; pg100.txt +gregorig pg3200.txt +gregorig,--the pg3200.txt +gregorig. pg3200.txt +gregorig.' pg3200.txt +gregory pg100.txt +gregory; pg100.txt +greif pg31100.txt +greif--but pg31100.txt +gremio pg100.txt +gremio! pg100.txt +gremio, pg100.txt +gremio. pg100.txt +gremio? pg100.txt +grenadier. pg3200.txt +grenoble pg3200.txt +grenouille, pg3200.txt +grenouille. pg3200.txt +grenville pg31100.txt +grenville?" pg31100.txt +gretchen pg3200.txt +gretchen! pg3200.txt +gretchen. pg3200.txt +gretchen.] pg3200.txt +grete pg3200.txt +gretna pg31100.txt +gretna-green, pg31100.txt +greville pg31100.txt +greville. pg31100.txt +grew pg31100.txt, pg3200.txt, pg100.txt +grew, pg3200.txt +grew. pg3200.txt, pg100.txt +grew: pg100.txt +grew; pg3200.txt, pg100.txt +grew? pg100.txt +grey pg31100.txt, pg3200.txt, pg100.txt +grey, pg3200.txt, pg100.txt +grey--it pg31100.txt +grey-heads pg3200.txt +grey. pg100.txt +grey." pg3200.txt +grey; pg100.txt +grey? pg100.txt +greyhound, pg100.txt +greyhound. pg100.txt +greyhound; pg3200.txt +greyhounds pg100.txt +greyhounds. pg100.txt +griddle-cakes." pg3200.txt +griddle; pg3200.txt +gridiron, pg3200.txt +gridley pg3200.txt +grief pg31100.txt, pg3200.txt, pg100.txt +grief! pg100.txt +grief's pg100.txt +grief, pg3200.txt, pg100.txt +grief- pg100.txt +grief-shot pg100.txt +grief. pg3200.txt, pg100.txt +grief." pg31100.txt +grief; pg3200.txt, pg100.txt +grief? pg100.txt +grief?" pg3200.txt +grief?' pg100.txt +griefs pg100.txt +griefs! pg100.txt +griefs, pg3200.txt, pg100.txt +griefs. pg3200.txt, pg100.txt +grierson pg31100.txt +griev'd pg100.txt +griev'd, pg100.txt +grievance pg100.txt +grievance, pg3200.txt +grievance. pg31100.txt, pg100.txt +grievances pg31100.txt, pg100.txt +grievances, pg31100.txt, pg3200.txt, pg100.txt +grievances. pg100.txt +grievances; pg100.txt +grieve pg31100.txt, pg3200.txt, pg100.txt +grieve, pg3200.txt +grieve. pg3200.txt +grieve; pg100.txt +grieved pg31100.txt, pg3200.txt +grieved, pg31100.txt, pg3200.txt +grieved- pg100.txt +grieved. pg3200.txt +grieves pg3200.txt, pg100.txt +grieveth pg3200.txt +grieving pg31100.txt, pg3200.txt, pg100.txt +grieving!" pg3200.txt +grieving, pg100.txt +grieving. pg3200.txt, pg100.txt +grievingly pg100.txt +grievous pg31100.txt, pg3200.txt +grievous, pg31100.txt +grievous. pg100.txt +grievously pg100.txt +grievously. pg100.txt +grievously." pg31100.txt +grievously? pg100.txt +griffith, pg100.txt +griffith. pg100.txt +griffith; pg100.txt +grim pg3200.txt +grim, pg3200.txt, pg100.txt +grim. 
pg100.txt +grimace, pg3200.txt +grime pg3200.txt, pg100.txt +grime--they pg3200.txt +grimes pg3200.txt +grimes' pg3200.txt +grimes. pg3200.txt +grimes: pg3200.txt +grimly pg3200.txt, pg100.txt +grimly, pg3200.txt, pg100.txt +grin pg100.txt +grin, pg100.txt +grind pg3200.txt, pg100.txt +grindelwald. pg3200.txt +grinding pg3200.txt +grinding. pg100.txt +grinds pg3200.txt +grindstone pg3200.txt +grindstone. pg3200.txt +grip pg3200.txt +grip, pg3200.txt +grip-sacks pg3200.txt +grip. pg3200.txt +gripe, pg100.txt +gripes pg3200.txt +gripped pg3200.txt +gripping pg3200.txt +gris, pg3200.txt +grisette?" pg3200.txt +grisettes-- pg3200.txt +grisi pg3200.txt +grisly pg3200.txt +grissel, pg100.txt +grit pg3200.txt +gritting pg3200.txt +grizzled pg3200.txt +grizzly pg3200.txt +groan pg3200.txt, pg100.txt +groan! pg100.txt +groan'd pg100.txt +groan, pg3200.txt, pg100.txt +groan. pg100.txt +groan: pg100.txt +groan; pg100.txt +groaning pg3200.txt +groaning, pg3200.txt +groanings pg3200.txt +groans pg3200.txt, pg100.txt +groans, pg3200.txt, pg100.txt +groans. pg31100.txt, pg3200.txt, pg100.txt +groans.] pg3200.txt +groans; pg3200.txt, pg100.txt +groans? pg100.txt +groans] pg100.txt +groat! pg100.txt +grocco--she pg3200.txt +grocer's. pg3200.txt +groceries pg3200.txt +grog, pg3200.txt +grog. pg31100.txt +groggy pg3200.txt +groggy, pg3200.txt +groom pg3200.txt, pg100.txt +groom, pg31100.txt +groom. pg100.txt +groom; pg100.txt +grooms pg100.txt +grooms! pg100.txt +grooms. pg100.txt +grooms? pg100.txt +groove pg3200.txt +grooves. pg3200.txt +groped pg3200.txt +groping pg31100.txt, pg3200.txt +groping, pg3200.txt +gropingly pg3200.txt +gropings pg3200.txt +gros pg100.txt +groschen pg3200.txt +gross pg31100.txt, pg3200.txt, pg100.txt +gross! pg100.txt +gross, pg31100.txt, pg100.txt +gross- pg100.txt +gross; pg100.txt +gross? pg3200.txt +grossly pg31100.txt +grossly. pg100.txt +grossmith's pg3200.txt +grossness pg100.txt +grosvenor, pg3200.txt +grotesque pg3200.txt +grotesque. pg3200.txt +grotesquely pg3200.txt +grotesqueness pg3200.txt +grotesqueries pg3200.txt +grotesqueries, pg3200.txt +grotesquest pg3200.txt +grotto--tradition pg3200.txt +grotto. pg3200.txt +grottoes pg3200.txt +ground pg31100.txt, pg3200.txt, pg100.txt +ground! pg3200.txt, pg100.txt +ground!' pg100.txt +ground"--then pg3200.txt +ground, pg31100.txt, pg3200.txt, pg100.txt +ground-- pg3200.txt +ground--the pg3200.txt +ground--why pg3200.txt +ground-connection--" pg3200.txt +ground-connection. pg3200.txt +ground-floor pg3200.txt +ground. pg31100.txt, pg3200.txt, pg100.txt +ground." pg3200.txt +ground: pg3200.txt, pg100.txt +ground; pg3200.txt, pg100.txt +ground? pg100.txt +ground?" pg31100.txt +ground] pg100.txt +groundless. pg31100.txt, pg3200.txt +groundlings, pg100.txt +grounds pg31100.txt, pg3200.txt, pg100.txt +grounds, pg31100.txt, pg3200.txt, pg100.txt +grounds--flying pg3200.txt +grounds. pg31100.txt, pg3200.txt +grounds." pg31100.txt +grounds; pg3200.txt +grounds? pg31100.txt +groundwork pg31100.txt, pg3200.txt +group pg31100.txt, pg3200.txt +group--this pg3200.txt +group. pg3200.txt +group: pg31100.txt +grouped pg3200.txt +grouping pg3200.txt +groupings pg3200.txt +groups pg3200.txt +groups, pg31100.txt, pg3200.txt +grove pg31100.txt, pg3200.txt, pg100.txt +grove, pg3200.txt, pg100.txt +grove--and pg31100.txt +grove-plumed pg3200.txt +grove. pg31100.txt, pg3200.txt, pg100.txt +grove." pg31100.txt, pg3200.txt +grove: pg100.txt +grove? 
pg31100.txt, pg100.txt +grovel pg3200.txt +grovels pg3200.txt +groves pg3200.txt, pg100.txt +groves, pg3200.txt, pg100.txt +groves. pg3200.txt +groves; pg100.txt +grow pg31100.txt, pg3200.txt, pg100.txt +grow! pg3200.txt, pg100.txt +grow'st, pg100.txt +grow'st. pg100.txt +grow, pg3200.txt, pg100.txt +grow--for pg3200.txt +grow. pg3200.txt, pg100.txt +grow; pg3200.txt, pg100.txt +grow? pg31100.txt, pg100.txt +growed pg3200.txt +groweth pg3200.txt +growing pg31100.txt, pg3200.txt, pg100.txt +growing, pg100.txt +growing- pg100.txt +growing-pains, pg3200.txt +growing. pg3200.txt, pg100.txt +growl pg3200.txt +growled pg3200.txt +growled, pg3200.txt +growling pg3200.txt +grown pg31100.txt, pg3200.txt, pg100.txt +grown, pg31100.txt +grown-up pg3200.txt +grown? pg3200.txt +grows pg31100.txt, pg3200.txt, pg100.txt +grows! pg3200.txt +grows, pg3200.txt, pg100.txt +grows. pg3200.txt, pg100.txt +grows; pg100.txt +growth pg3200.txt, pg100.txt +growth. pg3200.txt, pg100.txt +growth; pg100.txt +grub, pg100.txt +grubless pg3200.txt +grubs; pg3200.txt +grudge: pg100.txt +grudging pg3200.txt +gruel. pg3200.txt +gruel." pg31100.txt +gruesome pg3200.txt +grumble pg3200.txt +grumbled pg3200.txt +grumbling pg3200.txt +grumbling, pg3200.txt +grumbling. pg3200.txt +grumio pg100.txt +grumio! pg100.txt +grumio, pg100.txt +grumio. pg100.txt +grumio? pg100.txt +grummorsum pg3200.txt +grummorsum, pg3200.txt +grunted pg3200.txt +grunting, pg3200.txt +grunts pg3200.txt +guano; pg3200.txt +guarantee pg3200.txt +guarantees pg3200.txt +guaranty pg3200.txt +guard pg31100.txt, pg3200.txt, pg100.txt +guard!' pg3200.txt +guard" pg3200.txt +guard). pg3200.txt +guard, pg31100.txt, pg3200.txt, pg100.txt +guard--and pg3200.txt +guard--inside, pg3200.txt +guard-house pg3200.txt +guard. pg3200.txt, pg100.txt +guard: pg3200.txt +guard; pg3200.txt, pg100.txt +guard? pg100.txt +guarded pg31100.txt, pg3200.txt, pg100.txt +guarded, pg31100.txt, pg3200.txt, pg100.txt +guarded] pg100.txt +guarded]. pg100.txt +guardian pg3200.txt +guardian, pg31100.txt, pg3200.txt +guardian. pg31100.txt, pg100.txt +guardian." pg31100.txt +guardians pg3200.txt +guardians, pg3200.txt +guardianship pg3200.txt +guardianship!--how pg31100.txt +guarding pg31100.txt, pg3200.txt +guards pg3200.txt, pg100.txt +guards, pg3200.txt +guards--two pg3200.txt +guards. pg3200.txt +guards: pg3200.txt +guards; pg3200.txt +guardsman pg100.txt +guavas, pg3200.txt +guenever pg3200.txt +guerre'. pg3200.txt +guesclin, pg3200.txt +guess pg31100.txt, pg3200.txt, pg100.txt +guess!" pg3200.txt +guess'd. pg100.txt +guess, pg3200.txt, pg100.txt +guess. pg31100.txt, pg3200.txt, pg100.txt +guess." pg31100.txt, pg3200.txt +guess? pg31100.txt, pg3200.txt, pg100.txt +guess?" pg31100.txt +guess?--i pg31100.txt +guessable. pg3200.txt +guessed pg31100.txt, pg3200.txt +guessed, pg31100.txt +guessed. pg31100.txt, pg3200.txt +guesser, pg3200.txt +guesses pg3200.txt +guesses, pg3200.txt +guessing pg31100.txt, pg3200.txt +guessing. pg3200.txt +guessing? pg3200.txt +guesswork; pg3200.txt +guest pg31100.txt, pg3200.txt, pg100.txt +guest's pg3200.txt +guest, pg31100.txt, pg3200.txt, pg100.txt +guest-cavaleiro. pg100.txt +guest. pg3200.txt, pg100.txt +guest._] pg31100.txt +guest; pg100.txt +guest? pg100.txt +guests pg31100.txt, pg3200.txt, pg100.txt +guests! pg100.txt +guests, pg31100.txt, pg3200.txt, pg100.txt +guests. pg31100.txt, pg3200.txt, pg100.txt +guests." pg31100.txt +guests: pg100.txt +guests; pg3200.txt, pg100.txt +guests?" 
pg3200.txt +guiana, pg100.txt +guidance pg31100.txt, pg3200.txt +guidance. pg31100.txt +guide pg31100.txt, pg3200.txt, pg100.txt +guide! pg3200.txt, pg100.txt +guide!" pg3200.txt +guide, pg3200.txt, pg100.txt +guide- pg100.txt +guide--and pg3200.txt +guide-boards, pg3200.txt +guide-book pg3200.txt +guide-book, pg3200.txt +guide-book. pg3200.txt +guide-books, pg3200.txt +guide-in-chief pg3200.txt +guide. pg3200.txt, pg100.txt +guidebook.) pg3200.txt +guided pg31100.txt, pg3200.txt, pg100.txt +guided. pg31100.txt +guiderius pg100.txt +guiderius; pg100.txt +guides pg3200.txt, pg100.txt +guides, pg3200.txt, pg100.txt +guides. pg3200.txt, pg100.txt +guiding pg3200.txt +guiding, pg31100.txt +guild pg3200.txt +guild, pg3200.txt +guild-hall pg3200.txt +guild. pg3200.txt +guildenstern pg100.txt +guildenstern! pg100.txt +guildenstern. pg100.txt +guildenstern.] pg100.txt +guildenstern] pg100.txt +guildenstern]. pg100.txt +guildford pg100.txt +guildford, pg100.txt +guildford. pg100.txt +guildhall. pg3200.txt +guile, pg3200.txt, pg100.txt +guile? pg100.txt +guileless pg31100.txt, pg3200.txt +guileless, pg3200.txt +guileless; pg3200.txt +guilelessly pg3200.txt +guilford pg3200.txt +guilfords pg100.txt +guillaume pg3200.txt +guilt pg31100.txt, pg3200.txt, pg100.txt +guilt, pg100.txt +guilt. pg3200.txt, pg100.txt +guilt; pg100.txt +guilt?" pg31100.txt +guiltian, pg100.txt +guiltiness! pg100.txt +guiltiness, pg100.txt +guiltiness. pg100.txt +guiltless pg100.txt +guiltless, pg100.txt +guilts, pg100.txt +guilty pg31100.txt, pg3200.txt, pg100.txt +guilty!' pg100.txt +guilty, pg31100.txt, pg3200.txt, pg100.txt +guilty-like, pg100.txt +guilty. pg100.txt +guilty." pg3200.txt +guilty? pg100.txt +guinea pg31100.txt, pg3200.txt +guineas pg31100.txt, pg3200.txt +guineas, pg31100.txt +guise, pg100.txt +guitar pg3200.txt +guiteau, pg3200.txt +gul pg100.txt +gulch, pg3200.txt +gulch. pg3200.txt +gules. pg100.txt +gulf pg3200.txt, pg100.txt +gulf, pg3200.txt, pg100.txt +gulf. pg100.txt +gull pg100.txt +gull, pg3200.txt, pg100.txt +gull-catcher. pg100.txt +gull? pg100.txt +gulled pg3200.txt +gullies pg3200.txt +gullion," pg3200.txt +gulls; pg100.txt +gully; pg3200.txt +gum pg3200.txt +gum, pg31100.txt +gum-drops pg3200.txt +gum-tree, pg3200.txt +gum; pg100.txt +gumbert. pg3200.txt +gums pg3200.txt, pg100.txt +gums, pg3200.txt +gun pg3200.txt +gun, pg3200.txt, pg100.txt +gun-fire." pg3200.txt +gun. pg3200.txt +gun." pg3200.txt +gun.] pg3200.txt +gun; pg3200.txt +gun? pg100.txt +gun?" pg3200.txt +gunn: pg3200.txt +gunner pg100.txt +gunny-sacks, pg3200.txt +gunpowder pg3200.txt +gunpowder, pg3200.txt, pg100.txt +gunpowder. pg100.txt +guns pg3200.txt +guns! pg3200.txt +guns!" pg3200.txt +guns" pg3200.txt +guns, pg31100.txt, pg3200.txt +guns. pg3200.txt +guns; pg3200.txt +guns?" pg3200.txt +gunwale pg3200.txt +gunwale, pg3200.txt +gurgle pg3200.txt +gurgling pg3200.txt +gurnet. pg100.txt +gurney pg100.txt +gush pg3200.txt +gushed pg3200.txt +gushers. pg3200.txt +gushing pg3200.txt +gust pg31100.txt +gust, pg100.txt +gust; pg100.txt +gustavus pg31100.txt +gusts pg3200.txt, pg100.txt +gusts, pg3200.txt, pg100.txt +gusts. pg3200.txt +gusts? pg100.txt +gusty pg3200.txt +gut pg3200.txt +gut! pg3200.txt +guten pg3200.txt +gutenberg pg31100.txt, pg3200.txt, pg100.txt +gutenberg, pg100.txt +gutenberg-tm pg31100.txt, pg3200.txt, pg100.txt +gutenberg-tm's pg31100.txt, pg3200.txt, pg100.txt +gutenberg-tm, pg31100.txt, pg3200.txt, pg100.txt +gutenberg-tm. 
pg31100.txt, pg3200.txt, pg100.txt +gutenberg: pg31100.txt, pg3200.txt, pg100.txt +guthrie), pg3200.txt +gutig. pg3200.txt +guts pg100.txt +guts, pg100.txt +guttanen, pg3200.txt +gutter-snipes!" pg3200.txt +gutter; pg3200.txt +guttered pg3200.txt +gutters pg3200.txt +gutters, pg3200.txt +guttersnipes--" pg3200.txt +guy pg3200.txt +guy, pg3200.txt +guyed pg3200.txt +guys, pg3200.txt +guzzled pg3200.txt +gwalior": pg3200.txt +gwendolen pg3200.txt +gwendolen--" pg3200.txt +gwendolen-why?" pg3200.txt +gwendolen. pg3200.txt +gwendolen." pg3200.txt +gwendolen?" pg3200.txt +gwine pg3200.txt +gwyne pg3200.txt +gymnasium, pg3200.txt +gymnasium; pg3200.txt +gymnastic pg3200.txt +gypsy, pg100.txt +gyves! pg100.txt +gyves, pg100.txt +h" pg3200.txt +h--l. pg3200.txt +h-whack!--bum! pg3200.txt +h. pg3200.txt, pg100.txt +h." pg3200.txt +h.] pg3200.txt +h.h. pg3200.txt +h.w.l. pg3200.txt +ha! pg100.txt +ha!- pg100.txt +ha' pg100.txt +ha'nted pg3200.txt +ha'penny.' pg3200.txt +ha's, pg100.txt +ha't! pg100.txt +ha't. pg100.txt +ha, pg100.txt +ha-ha!" pg3200.txt +ha? pg100.txt +habe pg3200.txt +haben pg3200.txt +haben, pg3200.txt +haben. pg3200.txt +haberdasher pg100.txt +habiliment, pg100.txt +habiliments pg3200.txt +habiliments, pg100.txt +habiliments; pg100.txt +habit pg31100.txt, pg3200.txt, pg100.txt +habit, pg3200.txt, pg100.txt +habit-- pg31100.txt +habit--automatically; pg3200.txt +habit. pg3200.txt, pg100.txt +habit: pg31100.txt +habit; pg3200.txt +habit? pg3200.txt +habit?" pg3200.txt +habitable pg31100.txt +habitable!--or pg31100.txt +habitat pg3200.txt +habitat, pg3200.txt +habitation pg31100.txt, pg3200.txt, pg100.txt +habits pg31100.txt, pg3200.txt, pg100.txt +habits, pg31100.txt, pg3200.txt +habits- pg100.txt +habits. pg31100.txt, pg3200.txt +habits." pg31100.txt +habits; pg31100.txt +habits] pg100.txt +habitually pg31100.txt, pg3200.txt +habituated pg3200.txt +haboolong pg3200.txt +habsburg pg3200.txt +hack'd, pg100.txt +hack'd. pg100.txt +hack'd? pg100.txt +hack, pg3200.txt +hack- pg3200.txt +hack-driver, pg3200.txt +hack. pg3200.txt +hacked pg3200.txt +hacket. pg100.txt +hackett pg3200.txt +hackett's pg3200.txt +hacking pg3200.txt +hackman, pg3200.txt +hackman." pg3200.txt +hackney pg31100.txt +hackneyed, pg31100.txt +hacks pg3200.txt +hacks. pg100.txt +had! pg3200.txt, pg100.txt +had!" pg31100.txt, pg3200.txt +had, pg31100.txt, pg3200.txt, pg100.txt +had--" pg3200.txt +had--and pg3200.txt +had. pg31100.txt, pg3200.txt, pg100.txt +had." pg31100.txt, pg3200.txt +had.' pg3200.txt +had.--i pg31100.txt +had; pg31100.txt, pg3200.txt, pg100.txt +had?" pg3200.txt +hadde pg3200.txt +hading pg3200.txt +hadleyburg pg3200.txt +hadleyburg, pg3200.txt +hadn't pg3200.txt +hadn't), pg3200.txt +hadn't, pg3200.txt +hadn't. pg3200.txt +hadn't: pg3200.txt +hadn't; pg3200.txt +hadndsome pg3200.txt +hads pg3200.txt +hads. pg3200.txt +hads." pg3200.txt +hadst pg100.txt +haf pg3200.txt +hag! pg100.txt +hag. pg100.txt +haggard pg3200.txt +haggard, pg3200.txt, pg100.txt +haggard. pg100.txt +haggled pg3200.txt +hags! pg100.txt +hags? pg100.txt +haie pg3200.txt +haight pg3200.txt +hail pg31100.txt, pg100.txt +hail! pg100.txt +hail, pg3200.txt, pg100.txt +hail-barges, pg3200.txt +hail. pg3200.txt +hailed pg3200.txt +hailstorm. pg3200.txt +hailstorms, pg3200.txt +haimberger pg3200.txt +hain't pg3200.txt +hain't." pg3200.txt +haint pg3200.txt +hair pg31100.txt, pg3200.txt, pg100.txt +hair! 
pg3200.txt +hair), pg3200.txt +hair, pg31100.txt, pg3200.txt, pg100.txt +hair- pg100.txt +hair--a pg31100.txt +hair--is pg3200.txt +hair--not pg3200.txt +hair--through pg3200.txt +hair-ball pg3200.txt +hair-breadth pg3200.txt +hair-pin. pg3200.txt +hair-pin; pg3200.txt +hair. pg3200.txt, pg100.txt +hair." pg31100.txt, pg3200.txt +hair._] pg31100.txt +hair; pg3200.txt, pg100.txt +hair? pg100.txt +hair?" pg31100.txt +hair?--you pg31100.txt +hair] pg100.txt +hairbreadth pg3200.txt +haired pg3200.txt +hairless pg3200.txt +hairs pg3200.txt, pg100.txt +hairs! pg100.txt +hairs'- pg100.txt +hairs, pg100.txt +hairs. pg100.txt +hairy pg3200.txt, pg100.txt +hal! pg100.txt +hal, pg100.txt +hal. pg100.txt +hal? pg100.txt +halberd pg3200.txt +halberdiers pg3200.txt +halberdiers, pg3200.txt +halberdiers. pg3200.txt +halberds pg3200.txt, pg100.txt +halberds. pg100.txt +halcyon, pg3200.txt +hale pg3200.txt, pg100.txt +hale, pg31100.txt, pg3200.txt +hale. pg3200.txt +hale." pg3200.txt +haleakala pg3200.txt +haleakala--which pg3200.txt +haleakala. pg3200.txt +haled pg3200.txt +half pg31100.txt, pg3200.txt, pg100.txt +half!" pg3200.txt +half!' pg3200.txt +half, pg3200.txt, pg100.txt +half- pg3200.txt +half-- pg3200.txt +half--the pg3200.txt +half-an-hour pg3200.txt +half-an-hour. pg31100.txt +half-ashamed pg31100.txt +half-asleep; pg31100.txt +half-attach'd pg100.txt +half-barrels pg3200.txt +half-brother pg100.txt +half-century pg3200.txt +half-clad pg3200.txt +half-deserved." pg31100.txt +half-dollar, pg3200.txt +half-dozen pg3200.txt +half-drunk pg3200.txt +half-drunken pg3200.txt +half-eaten--certainly pg3200.txt +half-famished, pg3200.txt +half-generation. pg3200.txt +half-holiday. pg31100.txt +half-hour pg31100.txt, pg3200.txt +half-hour." pg31100.txt +half-hour; pg31100.txt +half-kirtles. pg100.txt +half-laughing pg31100.txt +half-mast. pg3200.txt +half-mile pg3200.txt +half-minded pg3200.txt +half-naked pg3200.txt +half-past pg31100.txt, pg3200.txt +half-pennyworth--five pg3200.txt +half-pleased pg31100.txt +half-profit pg3200.txt +half-savage pg3200.txt +half-shirt pg100.txt +half-sister pg3200.txt +half-sole pg3200.txt +half-stretch pg3200.txt +half-way pg3200.txt +half-way--and pg3200.txt +half-white. pg3200.txt +half-witted, pg3200.txt +half-world pg100.txt +half-year pg31100.txt +half-year, pg31100.txt +half-year. pg100.txt +half-yearly?" pg3200.txt +half. pg31100.txt, pg3200.txt, pg100.txt +half." pg31100.txt, pg3200.txt +half.' pg3200.txt +half: pg3200.txt +half; pg3200.txt +half? pg3200.txt +halfpenny. pg100.txt +halfway pg100.txt +hall's pg3200.txt +hall, pg31100.txt, pg3200.txt, pg100.txt +hall-- pg3200.txt +hall--the pg3200.txt +hall-door. pg31100.txt +hall-mark pg3200.txt +hall. pg31100.txt, pg3200.txt, pg100.txt +hall: pg3200.txt +hall; pg31100.txt, pg3200.txt +hall?" pg3200.txt +hall?' pg3200.txt +hallelujah!) pg3200.txt +halliday's pg3200.txt +halliday's: pg3200.txt +halloing pg100.txt +halloo'd pg100.txt +hallooed pg31100.txt +hallooing pg31100.txt +hallooing. pg31100.txt +hallowed pg3200.txt +hallowmas. pg100.txt +hallows pg3200.txt +halls pg3200.txt +halls, pg3200.txt +hallway, pg3200.txt +hally-ekka-lah) pg3200.txt +halo pg3200.txt +halo, pg3200.txt +haloed pg3200.txt +halos?" pg3200.txt +hals, pg100.txt +halsey pg3200.txt +halt pg3200.txt, pg100.txt +halt, pg3200.txt +halt. pg100.txt +halt: pg3200.txt, pg100.txt +halted pg3200.txt +halted, pg3200.txt +halted; pg3200.txt +halter. pg3200.txt, pg100.txt +halting pg3200.txt +halting, pg3200.txt +halting.' 
+halting; pg100.txt
+halton. pg31100.txt
+halves. pg3200.txt
+halves; pg31100.txt
+ham pg31100.txt, pg3200.txt
+hamlet pg3200.txt, pg100.txt
+hand pg31100.txt, pg3200.txt, pg100.txt
[several thousand similar added lines truncated: one record per token, giving the token and then the comma-separated list of the input texts (pg100.txt, pg31100.txt, pg3200.txt) that contain it, sorted by token and covering the range from "halting;" through "how?""; the excerpt is cut off mid-record at both ends]
pg3200.txt +how?--'looking pg3200.txt +howajji pg3200.txt +howajji, pg3200.txt +howard pg3200.txt +howard's pg3200.txt +howard. pg3200.txt +howard: pg3200.txt +howard? pg3200.txt +howards's pg3200.txt +howdy, pg3200.txt +howdy-do pg3200.txt +howdy." pg3200.txt +howells pg3200.txt +howells's pg3200.txt +howells's, pg3200.txt +howells): pg3200.txt +howells, pg3200.txt +howells. pg3200.txt +howells." pg3200.txt +howells;" pg3200.txt +howellses pg3200.txt +howellses. pg3200.txt +however pg31100.txt, pg3200.txt +however, pg31100.txt, pg3200.txt, pg100.txt +however," pg31100.txt +however--. pg31100.txt +however--for pg31100.txt +however--to pg3200.txt +however. pg31100.txt, pg3200.txt +however." pg31100.txt +however; pg31100.txt, pg3200.txt +howitzer pg3200.txt +howitzer. pg3200.txt +howl pg3200.txt +howl, pg3200.txt, pg100.txt +howl. pg3200.txt, pg100.txt +howland, pg3200.txt +howled, pg3200.txt +howler. pg3200.txt +howling pg3200.txt +howling, pg3200.txt +howling. pg100.txt +howlings pg3200.txt +howlings, pg3200.txt +hows pg31100.txt +howsoe'er pg100.txt +howsoe'er! pg100.txt +howsoe'er, pg100.txt +howsoever pg3200.txt, pg100.txt +http://gutenberg.net/license). pg3200.txt +http://gutenberg.org/license). pg31100.txt +http://pglaf.org pg31100.txt, pg3200.txt, pg100.txt +http://pglaf.org/donate pg31100.txt, pg3200.txt, pg100.txt +http://www.gutenberg.net pg3200.txt +http://www.gutenberg.net/3/2/0/3200/ pg3200.txt +http://www.gutenberg.org pg31100.txt, pg100.txt +http://www.gutenberg.org/1/0/100/ pg100.txt +http://www.gutenberg.org/1/0/2/3/10234 pg100.txt +http://www.gutenberg.org/2/4/6/8/24689 pg100.txt +http://www.gutenberg.org/3/1/1/0/31100/ pg31100.txt +http://www.gutenberg.org/catalog/ pg3200.txt +http://www.gutenberg.org/gutindex.all pg100.txt +http://www.gutenberg.org/license). pg100.txt +http://www.ibiblio.org/gutenberg/etext06 pg100.txt +http://www.pglaf.org. pg31100.txt, pg3200.txt, pg100.txt +hu- pg3200.txt +hualaiai. pg3200.txt +hubbard pg3200.txt +hubbub pg3200.txt +hubert pg100.txt +hubert's. pg100.txt +hubert, pg100.txt +hubert. pg100.txt +hubert; pg100.txt +huck pg3200.txt +huck!" pg3200.txt +huck's pg3200.txt +huck, pg3200.txt +huck. pg3200.txt +huck." pg3200.txt +huck? pg3200.txt +huck?" pg3200.txt +huckleberry pg3200.txt +huckleberry!" pg3200.txt +huckleberry. pg3200.txt +hucky!" pg3200.txt +huddle pg3200.txt +huddled pg3200.txt +hudson pg3200.txt +hudson, pg3200.txt +hue pg3200.txt, pg100.txt +hue, pg100.txt +hue." pg31100.txt, pg3200.txt +hue; pg100.txt +hue? pg100.txt +hued pg3200.txt +hues pg3200.txt +huff, pg3200.txt +huffy pg3200.txt +hug pg3200.txt, pg100.txt +hug, pg3200.txt +huge pg3200.txt +huge! pg100.txt +huge, pg3200.txt, pg100.txt +huge-bodied, pg3200.txt +hugged pg3200.txt +hugging pg3200.txt +hugh pg3200.txt, pg100.txt +hugh! pg100.txt +hugh, pg3200.txt +hugh. pg100.txt +hugh? pg100.txt +hughes pg31100.txt +hughes." pg31100.txt, pg3200.txt +hugo pg3200.txt +hugo! pg3200.txt +hugo's pg3200.txt +hugo, pg3200.txt +hugo. pg3200.txt +huguenot pg3200.txt +huguenot, pg3200.txt +hula--a pg3200.txt +huleh pg3200.txt +hulflosen pg3200.txt +hulk." pg31100.txt +hulking pg3200.txt +hulling pg100.txt +hum pg3200.txt +hum! pg100.txt +hum!" pg3200.txt +hum, pg100.txt +hum. pg3200.txt +human pg31100.txt, pg3200.txt +human!" pg3200.txt +human, pg3200.txt, pg100.txt +human- pg100.txt +human. pg3200.txt, pg100.txt +human: pg3200.txt +human; pg3200.txt +humane pg3200.txt, pg100.txt +humaner. 
pg3200.txt +humanity pg31100.txt, pg3200.txt, pg100.txt +humanity), pg3200.txt +humanity, pg31100.txt, pg3200.txt +humanity--six pg3200.txt +humanity. pg3200.txt, pg100.txt +humanity: pg3200.txt +humanity; pg3200.txt +humans pg3200.txt +humans. pg3200.txt +humble pg31100.txt, pg3200.txt, pg100.txt +humble, pg31100.txt, pg3200.txt, pg100.txt +humble-bee, pg100.txt +humble-bees, pg100.txt +humble-mouth'd; pg100.txt +humble. pg100.txt +humbled pg3200.txt, pg100.txt +humbled, pg3200.txt +humbled. pg3200.txt, pg100.txt +humbleness, pg100.txt +humbleness. pg100.txt +humbler pg3200.txt +humbler; pg100.txt +humblest pg3200.txt +humbling pg31100.txt, pg3200.txt +humbly pg3200.txt, pg100.txt +humbly-- pg3200.txt +humbly: pg3200.txt +humboldt pg3200.txt +humboldt, pg3200.txt +humboldt. pg3200.txt +humboldt." pg3200.txt +humboldt? pg3200.txt +humbug pg3200.txt +humbug! pg3200.txt +humbug. pg3200.txt +humbuggery pg3200.txt +humbugs pg3200.txt +humbugs. pg3200.txt +hume pg100.txt +hume! pg100.txt +hume, pg100.txt +hume. pg3200.txt +humh! pg100.txt +humiliated pg3200.txt +humiliating pg31100.txt +humiliatingly pg3200.txt +humiliation pg31100.txt, pg3200.txt +humiliation! pg3200.txt +humiliation, pg3200.txt +humiliation. pg3200.txt +humiliation: pg3200.txt +humiliation; pg3200.txt +humility pg31100.txt, pg100.txt +humility, pg3200.txt, pg100.txt +humility. pg31100.txt, pg3200.txt, pg100.txt +humility; pg100.txt +humility? pg100.txt +hummed pg3200.txt +humming pg3200.txt +humming, pg100.txt +humor pg3200.txt +humor, pg3200.txt, pg100.txt +humor. pg3200.txt, pg100.txt +humor." pg3200.txt +humor: pg100.txt +humor; pg3200.txt +humored pg3200.txt +humorisms. pg3200.txt +humorist pg3200.txt +humorist." pg3200.txt +humorists pg3200.txt +humorists; pg3200.txt +humorous pg3200.txt, pg100.txt +humorous-story pg3200.txt +humorous. pg100.txt +humorous? pg3200.txt +humorously pg3200.txt +humors pg100.txt +humors, pg3200.txt +humour pg31100.txt, pg3200.txt, pg100.txt +humour, pg31100.txt, pg100.txt +humour. pg31100.txt, pg100.txt +humour." pg31100.txt +humour; pg3200.txt +humouring pg31100.txt +humourist.--yes, pg31100.txt +humourists pg100.txt +humours pg100.txt +humours, pg31100.txt, pg100.txt +humours. pg100.txt +humours; pg100.txt +hump pg3200.txt +humphrey pg3200.txt, pg100.txt +humphrey!' pg100.txt +humphrey, pg100.txt +humphrey. pg100.txt +humphrey; pg100.txt +humps pg3200.txt +hums pg100.txt +hun--" pg3200.txt +hunched pg3200.txt +hund pg3200.txt +hundred pg31100.txt, pg3200.txt, pg100.txt +hundred! pg3200.txt +hundred!" pg3200.txt +hundred!' pg3200.txt +hundred!--pile pg3200.txt +hundred, pg3200.txt +hundred--" pg3200.txt +hundred--and pg3200.txt +hundred--let pg3200.txt +hundred--say pg3200.txt +hundred-dollar pg3200.txt +hundred-yard pg3200.txt +hundred. pg31100.txt, pg3200.txt, pg100.txt +hundred." pg3200.txt +hundred; pg100.txt +hundreds pg31100.txt, pg3200.txt +hundreds, pg3200.txt +hundreds. pg31100.txt, pg3200.txt, pg100.txt +hundredth pg3200.txt, pg100.txt +hung pg31100.txt, pg3200.txt, pg100.txt +hung, pg100.txt +hung." pg3200.txt +hungarians pg3200.txt +hungary's! pg100.txt +hungary. pg3200.txt +hunger pg31100.txt, pg3200.txt, pg100.txt +hunger, pg3200.txt, pg100.txt +hunger-inspired pg3200.txt +hunger. pg3200.txt +hunger; pg3200.txt +hungerford. pg100.txt +hungerford; pg100.txt +hungering. pg3200.txt +hungerly pg100.txt +hungrier pg3200.txt +hungriest pg3200.txt +hungrily pg3200.txt +hungry pg3200.txt, pg100.txt +hungry!" pg3200.txt +hungry, pg3200.txt +hungry--" pg3200.txt +hungry. 
pg3200.txt, pg100.txt +hungry." pg3200.txt +hungry; pg31100.txt, pg3200.txt +hungry? pg3200.txt +hungry?" pg3200.txt +hunk pg3200.txt +hunsford pg31100.txt +hunsford, pg31100.txt +hunsford. pg31100.txt +hunsford." pg31100.txt +hunt pg3200.txt, pg100.txt +hunt! pg3200.txt +hunt's pg3200.txt +hunt, pg3200.txt +hunt,' pg3200.txt +hunt- pg100.txt +hunt--assault pg3200.txt +hunt--much pg3200.txt +hunt. pg3200.txt, pg100.txt +hunt." pg31100.txt, pg3200.txt +hunt; pg100.txt +hunt? pg31100.txt, pg100.txt +hunted pg3200.txt, pg100.txt +hunter pg31100.txt, pg3200.txt +hunter's pg3200.txt +hunter, pg3200.txt, pg100.txt +hunter. pg31100.txt, pg100.txt +hunter." pg31100.txt +hunter: pg3200.txt +hunters pg3200.txt +hunters, pg31100.txt +huntin' pg3200.txt +hunting pg31100.txt, pg3200.txt, pg100.txt +hunting, pg100.txt +hunting-ground. pg3200.txt +hunting. pg31100.txt, pg100.txt +huntingdon pg3200.txt +huntington's pg3200.txt +huntingtons pg3200.txt +hunts." pg31100.txt +huntsman pg3200.txt, pg100.txt +huntsman, pg31100.txt +huntsmen pg100.txt +hur--" pg3200.txt +huray! pg3200.txt +hurdy-gurdy pg3200.txt +hurl pg3200.txt, pg100.txt +hurled pg3200.txt +hurls pg3200.txt +hurls. pg100.txt +hurrah pg3200.txt +hurrah! pg3200.txt +hurrahs. pg3200.txt +hurricane pg3200.txt +hurricane, pg3200.txt +hurricane-deck. pg3200.txt +hurricane. pg3200.txt +hurricane; pg3200.txt +hurricanes pg3200.txt +hurried pg31100.txt, pg3200.txt, pg100.txt +hurried. pg3200.txt +hurriedly pg3200.txt +hurries pg100.txt +hurry pg31100.txt, pg3200.txt +hurry!" pg3200.txt +hurry, pg31100.txt, pg3200.txt +hurry--one pg3200.txt +hurry. pg3200.txt +hurry." pg3200.txt +hurry.' pg3200.txt +hurry; pg31100.txt, pg3200.txt +hurrying pg31100.txt, pg3200.txt +hurst pg31100.txt +hurst, pg31100.txt +hurst?" pg31100.txt +hurt pg31100.txt, pg3200.txt, pg100.txt +hurt, pg3200.txt, pg100.txt +hurt-- pg3200.txt +hurt--and pg3200.txt +hurt. pg3200.txt, pg100.txt +hurt." pg3200.txt +hurt.' pg3200.txt +hurt; pg3200.txt, pg100.txt +hurt? pg100.txt +hurt?" pg3200.txt +hurting. pg3200.txt +hurtling pg100.txt +hurts pg3200.txt, pg100.txt +hurts, pg3200.txt, pg100.txt +hurts. pg3200.txt +hurts?" pg3200.txt +hus--' pg3200.txt +husband pg31100.txt, pg3200.txt, pg100.txt +husband! pg100.txt +husband!" pg3200.txt +husband!' pg100.txt +husband'? pg100.txt +husband's pg31100.txt, pg3200.txt, pg100.txt +husband's! pg100.txt +husband) pg31100.txt +husband, pg31100.txt, pg3200.txt, pg100.txt +husband- pg100.txt +husband. pg31100.txt, pg3200.txt, pg100.txt +husband." pg31100.txt, pg3200.txt +husband; pg3200.txt, pg100.txt +husband? pg31100.txt, pg100.txt +husband?" pg3200.txt +husbanded? pg100.txt +husbandry pg3200.txt, pg100.txt +husbandry. pg100.txt +husbandry? pg100.txt +husbands pg31100.txt, pg3200.txt, pg100.txt +husbands! pg100.txt +husbands, pg3200.txt, pg100.txt +husbands. pg31100.txt, pg3200.txt, pg100.txt +husbands." pg31100.txt +husbands; pg100.txt +husbun' pg3200.txt +hush pg3200.txt +hush! pg100.txt +hush!" pg31100.txt +hush, pg3200.txt +hush. pg3200.txt, pg100.txt +hushed pg3200.txt +husks pg100.txt +huss: pg3200.txt +hussite, pg3200.txt +hussy! pg3200.txt +hussy!" pg3200.txt +hussy, pg3200.txt +hussy--can't pg3200.txt +hustled pg3200.txt +huswife. pg100.txt +huswifes pg100.txt +hut pg3200.txt +hut" pg3200.txt +hut-keeper, pg3200.txt +hut. pg3200.txt +hutchinson. pg3200.txt +huts pg3200.txt +huts, pg3200.txt +huvvering pg3200.txt +huzza pg3200.txt +huzza'd pg3200.txt +huzzah! pg3200.txt +huzzaing pg3200.txt +huzzaing. 
pg3200.txt +huzzas, pg3200.txt +huzzy pg3200.txt +hy'roglyphics." pg3200.txt +hyacinth." pg31100.txt +hybernates pg3200.txt +hybernian, pg3200.txt +hyde pg3200.txt +hyde's pg3200.txt +hyde, pg3200.txt +hyderabad; pg3200.txt +hydra. pg3200.txt +hydraulic!" pg3200.txt +hyen, pg100.txt +hyena." pg3200.txt +hyenas; pg3200.txt +hyland. pg3200.txt +hym, pg3200.txt +hymen pg100.txt +hymn pg3200.txt +hymn-books, pg3200.txt +hymn-singing, pg3200.txt +hymn. pg100.txt +hymn; pg100.txt +hymns pg3200.txt +hymns. pg3200.txt +hymns?" pg3200.txt +hyperbolical, pg100.txt +hyperion. pg100.txt +hyphens pg3200.txt +hyphens; pg3200.txt +hypnotism; pg3200.txt +hypnotists; pg3200.txt +hypocrisy pg3200.txt +hypocrisy, pg100.txt +hypocrisy. pg100.txt +hypocrisy; pg100.txt +hypocrite! pg100.txt +hypocrite. pg31100.txt, pg100.txt +hypocrites- pg100.txt +hypothesis pg3200.txt +hypothesis, pg3200.txt +hyrcan pg3200.txt +hyrcania. pg100.txt +hys pg3200.txt +hysterics pg31100.txt +i! pg100.txt +i!" pg3200.txt +i" pg3200.txt +i' pg100.txt +i'd pg31100.txt, pg3200.txt, pg100.txt +i'd, pg3200.txt +i'd--" pg3200.txt +i'll pg3200.txt, pg100.txt +i'll--" pg3200.txt +i'll----" pg3200.txt +i'm pg31100.txt, pg3200.txt +i'm--" pg3200.txt +i's pg3200.txt +i've pg3200.txt +i've--" pg3200.txt +i) pg31100.txt +i). pg31100.txt +i, pg31100.txt, pg3200.txt, pg100.txt +i," pg31100.txt, pg100.txt +i,-- pg3200.txt +i,--be pg31100.txt +i- pg100.txt +i-- pg3200.txt +i--" pg3200.txt +i--' pg3200.txt +i--'" pg3200.txt +i--(leaving.) pg3200.txt +i--care pg3200.txt +i--er--" pg3200.txt +i--i--couldn't pg3200.txt +i--i--thought pg3200.txt +i--still, pg3200.txt +i. pg31100.txt, pg3200.txt, pg100.txt +i." pg31100.txt, pg3200.txt +i.) pg31100.txt +i.). pg31100.txt +i.- pg100.txt +i.--i pg31100.txt +i: pg3200.txt, pg100.txt +i; pg3200.txt, pg100.txt +i? pg3200.txt, pg100.txt +i?" pg31100.txt, pg3200.txt +i?"--and pg31100.txt +i?--anne, pg31100.txt +i?--you pg3200.txt +ia.: pg3200.txt +iachimo pg100.txt +iachimo! pg100.txt +iachimo, pg100.txt +iachimo. pg100.txt +iago! pg100.txt +iago, pg100.txt +iago. pg100.txt +iago; pg100.txt +iago? pg100.txt +iamo pg3200.txt +iamo, pg3200.txt +iamoqua, pg3200.txt +ianthe, pg3200.txt +iao pg3200.txt +ib. pg3200.txt +icarus; pg100.txt +ice pg3200.txt, pg100.txt +ice, pg3200.txt +ice--hard, pg3200.txt +ice-arch pg3200.txt +ice-blocks.' pg3200.txt +ice-blocks?' pg3200.txt +ice-cavern, pg3200.txt +ice-cream pg3200.txt +ice-fields pg3200.txt +ice-house, pg3200.txt +ice-man pg3200.txt +ice-pitcher. pg3200.txt +ice-storm pg3200.txt +ice-storm. pg3200.txt +ice-water. pg3200.txt +ice-water." pg3200.txt +ice. pg3200.txt, pg100.txt +ice; pg3200.txt, pg100.txt +ice?' pg3200.txt +iceberg pg3200.txt +iceberg. pg3200.txt +icebergs pg3200.txt +iceland pg3200.txt +iceland! pg100.txt +ices pg3200.txt +ich, pg3200.txt +ich-- pg3200.txt +icicle pg3200.txt, pg100.txt +icicles pg3200.txt, pg100.txt +icicles, pg100.txt +icing pg3200.txt +iconoclasm pg3200.txt +icy pg3200.txt +id'eal pg3200.txt +id.: pg3200.txt +idaho pg3200.txt +idea pg31100.txt, pg3200.txt +idea! pg31100.txt, pg3200.txt +idea!" pg31100.txt +idea, pg31100.txt, pg3200.txt +idea--i pg31100.txt, pg3200.txt +idea--namely, pg3200.txt +idea--she pg3200.txt +idea--to pg3200.txt +idea. pg31100.txt, pg3200.txt +idea." pg31100.txt, pg3200.txt +idea.--did pg31100.txt +idea.... pg3200.txt +idea..... pg3200.txt +idea: pg3200.txt +idea; pg31100.txt, pg3200.txt +idea?" pg31100.txt, pg3200.txt +idea?' pg3200.txt +ideal--call pg3200.txt +ideal. pg3200.txt +ideals pg3200.txt +ideals! 
pg3200.txt +ideals, pg3200.txt +ideals--these, pg3200.txt +ideals. pg3200.txt +ideas pg31100.txt, pg3200.txt +ideas!" pg31100.txt +ideas, pg31100.txt, pg3200.txt +ideas. pg3200.txt +ideas." pg31100.txt +idee pg3200.txt +iden pg100.txt +identical." pg3200.txt +identification pg31100.txt, pg3200.txt, pg100.txt +identified pg3200.txt +identified." pg3200.txt +identify pg3200.txt +identify, pg3200.txt +identity pg3200.txt +identity. pg3200.txt +ides pg3200.txt +idiom--mark pg3200.txt +idiot pg3200.txt, pg100.txt +idiot!" pg3200.txt +idiot), pg3200.txt +idiot, pg31100.txt, pg3200.txt +idiot--just pg3200.txt +idiot. pg3200.txt +idiot." pg3200.txt +idiot; pg100.txt +idiot?" pg3200.txt +idiot?' pg3200.txt +idiotic pg3200.txt +idiotic!" pg3200.txt +idiotic. pg3200.txt +idiotically pg3200.txt +idiots pg3200.txt +idiots, pg3200.txt +idiots. pg3200.txt +idiots; pg3200.txt +idle pg31100.txt, pg3200.txt, pg100.txt +idle, pg3200.txt, pg100.txt +idle--and pg3200.txt +idle. pg3200.txt, pg100.txt +idle; pg31100.txt, pg3200.txt +idle? pg100.txt +idleness pg31100.txt, pg3200.txt +idleness, pg100.txt +idleness. pg100.txt +idleness; pg100.txt +idlers pg3200.txt +idles pg3200.txt +idling pg3200.txt +idly pg31100.txt +idol pg3200.txt, pg100.txt +idol! pg3200.txt +idol. pg3200.txt +idolater pg3200.txt +idolater. pg3200.txt +idolatrous pg3200.txt +idolatry pg3200.txt, pg100.txt +idolatry, pg100.txt +idolatry. pg3200.txt, pg100.txt +idolized pg31100.txt, pg3200.txt +idols pg3200.txt +idols, pg3200.txt +idols--and pg3200.txt +idols; pg3200.txt +idyllic pg3200.txt +idyllic, pg3200.txt +idyllic. pg3200.txt +if! pg100.txt +if, pg31100.txt, pg3200.txt, pg100.txt +if," pg3200.txt +if-- pg3200.txt +if--" pg31100.txt, pg3200.txt +if--"if pg3200.txt +if----" pg3200.txt +if--strange pg31100.txt +if. pg100.txt +iggensbach pg3200.txt +ignoble pg3200.txt +ignoble. pg3200.txt +ignominy pg3200.txt +ignomy. pg100.txt +ignoramus. pg3200.txt +ignorance pg31100.txt, pg3200.txt, pg100.txt +ignorance! pg31100.txt, pg3200.txt, pg100.txt +ignorance, pg31100.txt, pg3200.txt, pg100.txt +ignorance. pg31100.txt, pg3200.txt, pg100.txt +ignorance." pg31100.txt +ignorance."] pg3200.txt +ignorant pg31100.txt, pg3200.txt, pg100.txt +ignorant, pg31100.txt, pg3200.txt, pg100.txt +ignorant- pg100.txt +ignorant. pg31100.txt, pg3200.txt, pg100.txt +ignorant? pg3200.txt +ignorantly pg3200.txt +ignore pg3200.txt +ignored pg3200.txt +ignoring pg3200.txt +ihnen pg3200.txt +ii pg31100.txt, pg3200.txt +ii, pg31100.txt, pg3200.txt +ii. pg3200.txt, pg100.txt +ii., pg3200.txt +iii pg31100.txt, pg3200.txt, pg100.txt +iii. pg3200.txt, pg100.txt +iii., pg3200.txt +ilbow. pg100.txt +iliad. pg3200.txt +ilion. pg100.txt +ilion; pg100.txt +ilium pg3200.txt +ilium, pg100.txt +ilium. pg3200.txt +ilium? pg100.txt +ill! pg31100.txt, pg100.txt +ill!'--which pg31100.txt +ill, pg31100.txt, pg3200.txt, pg100.txt +ill," pg31100.txt +ill--for pg31100.txt +ill--prostrated pg3200.txt +ill-bred pg31100.txt +ill-bred, pg3200.txt +ill-bred." pg31100.txt +ill-breeding, pg31100.txt +ill-chosen pg31100.txt, pg3200.txt +ill-disposed, pg31100.txt +ill-fame. pg31100.txt +ill-fated pg31100.txt +ill-favour'd. pg100.txt +ill-favoured pg100.txt +ill-favouredly. pg100.txt +ill-headed; pg100.txt +ill-health pg31100.txt +ill-health. pg31100.txt +ill-humour pg31100.txt +ill-humour, pg31100.txt +ill-humour. pg31100.txt +ill-judged pg31100.txt +ill-luck pg3200.txt +ill-luck, pg3200.txt +ill-managed. 
pg3200.txt +ill-natured, pg31100.txt, pg3200.txt +ill-naturedly pg3200.txt +ill-opinion, pg31100.txt +ill-shapen, pg3200.txt +ill-tempered pg31100.txt +ill-timed pg31100.txt +ill-timed; pg31100.txt +ill-usage,) pg31100.txt +ill-usage. pg31100.txt +ill-usage; pg31100.txt +ill-will, pg31100.txt +ill-wishers pg3200.txt +ill-written, pg31100.txt +ill. pg31100.txt, pg3200.txt, pg100.txt +ill." pg31100.txt, pg3200.txt +ill.--she pg31100.txt +ill: pg100.txt +ill; pg100.txt +ill?" pg31100.txt +illegal pg3200.txt +illegality pg3200.txt +illegible. pg3200.txt +illegitimacy, pg31100.txt +illiberal; pg31100.txt +illinois pg3200.txt +illinois, pg3200.txt +illinois. pg3200.txt +illiteracy pg3200.txt +illiterate pg31100.txt, pg3200.txt +illiterate, pg3200.txt +illness pg31100.txt, pg3200.txt +illness, pg31100.txt, pg3200.txt +illness," pg31100.txt +illness--if pg31100.txt +illness. pg3200.txt +illness." pg31100.txt +illness; pg31100.txt +illnesses pg3200.txt +illogical, pg3200.txt +ills pg3200.txt, pg100.txt +ills. pg3200.txt +illuminate pg100.txt +illuminate, pg3200.txt +illuminated pg3200.txt +illuminated, pg3200.txt +illuminating pg3200.txt +illuminations, pg3200.txt +illuminations. pg3200.txt +illus- pg3200.txt +illusion pg3200.txt, pg100.txt +illusion! pg100.txt +illusion, pg3200.txt +illusion? pg3200.txt +illusions pg100.txt +illusions, pg3200.txt +illusions. pg100.txt +illusions; pg3200.txt +illustrate pg3200.txt +illustrate. pg3200.txt +illustrate: pg3200.txt +illustrated pg31100.txt, pg3200.txt +illustrated, pg3200.txt +illustrated. pg3200.txt +illustrated?" pg3200.txt +illustrates pg3200.txt +illustrating. pg3200.txt +illustration pg31100.txt, pg3200.txt +illustration. pg3200.txt +illustration." pg3200.txt +illustration: pg3200.txt +illustrations pg31100.txt, pg3200.txt +illustrations, pg3200.txt +illustrations. pg3200.txt +illustrious pg3200.txt +illustrious" pg3200.txt +illustrious--" pg3200.txt +illustrious?" pg3200.txt +illyria pg100.txt +illyria. pg100.txt +illyria? pg100.txt +im--" pg3200.txt +image pg31100.txt, pg3200.txt, pg100.txt +image! pg100.txt +image, pg3200.txt, pg100.txt +image." pg3200.txt +imaged pg31100.txt +imagery pg3200.txt +imagery, pg3200.txt +images pg31100.txt, pg100.txt +images, pg3200.txt, pg100.txt +images; pg100.txt +imaginable pg31100.txt, pg3200.txt +imaginable. pg3200.txt +imaginable; pg3200.txt +imaginary pg31100.txt, pg3200.txt +imaginary, pg31100.txt +imaginary. pg100.txt +imaginary." pg3200.txt +imaginary.' pg3200.txt +imaginary.] pg3200.txt +imagination pg31100.txt, pg3200.txt, pg100.txt +imagination) pg3200.txt +imagination, pg31100.txt, pg3200.txt, pg100.txt +imagination. pg31100.txt, pg3200.txt, pg100.txt +imagination." pg3200.txt +imagination.' pg3200.txt +imagination: pg3200.txt +imagination; pg31100.txt, pg3200.txt +imaginations pg31100.txt, pg100.txt +imaginations. pg31100.txt +imaginations; pg3200.txt +imagine pg31100.txt, pg3200.txt, pg100.txt +imagine, pg31100.txt, pg3200.txt +imagine- pg100.txt +imagine--" pg31100.txt +imagine. pg31100.txt, pg3200.txt, pg100.txt +imagine." pg31100.txt, pg3200.txt +imagine; pg31100.txt +imagined pg31100.txt, pg3200.txt +imagined) pg31100.txt +imagined, pg3200.txt +imagined. pg31100.txt, pg3200.txt +imagined." pg31100.txt +imagined? pg31100.txt +imagines pg3200.txt +imagines. pg3200.txt +imagining pg31100.txt, pg3200.txt +imagining, pg3200.txt +imagining--that pg3200.txt +imagining. pg31100.txt +imaginings pg3200.txt +imaginings. 
pg3200.txt +imaginings: pg100.txt +imbecile pg3200.txt +imbeciles pg3200.txt +imbecility, pg3200.txt, pg100.txt +imbecility. pg3200.txt +imbedded pg3200.txt +imbibed pg3200.txt +imbrue? pg100.txt +imitate pg31100.txt, pg3200.txt +imitated pg3200.txt, pg100.txt +imitating pg3200.txt +imitation pg3200.txt +imitation, pg3200.txt +imitation--chinese pg3200.txt +imitation. pg3200.txt, pg100.txt +imitation." pg31100.txt +imitations, pg100.txt +immaculate, pg100.txt +immask pg100.txt +immaterial pg100.txt +immeasurably pg3200.txt +immediate pg31100.txt, pg3200.txt, pg100.txt +immediate, pg31100.txt, pg3200.txt +immediate. pg31100.txt +immediately pg31100.txt, pg3200.txt, pg100.txt +immediately! pg100.txt +immediately, pg31100.txt, pg3200.txt +immediately. pg31100.txt, pg3200.txt, pg100.txt +immediately." pg31100.txt +immediately.' pg3200.txt +immediately; pg31100.txt, pg3200.txt +immense pg31100.txt, pg3200.txt +immense, pg31100.txt, pg3200.txt +immense--incalculable. pg3200.txt +immensely pg3200.txt +immersion pg3200.txt +immigrant, pg3200.txt +immigration, pg3200.txt +immigration. pg3200.txt +imminence pg3200.txt +imminent pg3200.txt +imminent! pg3200.txt +imminent!! pg3200.txt +imminent!!! pg3200.txt +imminent, pg3200.txt +imminent--so pg3200.txt +imminent--why pg3200.txt +imminent. pg3200.txt, pg100.txt +immoderately pg31100.txt +immoderately?" pg3200.txt +immodest. pg3200.txt +immodesty pg3200.txt +immodesty, pg3200.txt +immoral pg3200.txt +immoral! pg3200.txt +immoral, pg3200.txt +immoral,) pg3200.txt +immoral. pg3200.txt +immorality pg3200.txt +immortal pg3200.txt +immortal!" pg3200.txt +immortal, pg3200.txt +immortal. pg3200.txt +immortalised pg31100.txt +immortality pg31100.txt, pg3200.txt +immortality!" pg3200.txt +immortality. pg3200.txt +immortality? pg3200.txt +immortaliz'd. pg100.txt +immortalize pg3200.txt +immortally pg100.txt +immortals pg3200.txt +immortelles. pg3200.txt +immovable pg31100.txt +immovable. pg31100.txt, pg3200.txt +immur'd, pg100.txt +immured, pg100.txt +immures pg100.txt +imogen pg100.txt +imogen! pg100.txt +imogen, pg100.txt +imogen. pg100.txt +imogen; pg100.txt +imogen? pg100.txt +imp, pg100.txt +imp. pg100.txt +impair pg3200.txt +impaired pg3200.txt +impaired--. pg31100.txt +impaired. pg3200.txt +impanelled pg100.txt +impart pg31100.txt, pg3200.txt, pg100.txt +impart. pg100.txt +impart: pg100.txt +imparted pg3200.txt +impartial pg31100.txt, pg3200.txt +impartial, pg3200.txt +impartiality pg31100.txt, pg3200.txt +impartiality, pg31100.txt +impartially pg31100.txt, pg3200.txt +impassable, pg31100.txt +impassive pg3200.txt +impassive. pg3200.txt +impatience pg31100.txt, pg3200.txt, pg100.txt +impatience, pg3200.txt, pg100.txt +impatience--tell pg3200.txt +impatience. pg3200.txt, pg100.txt +impatience: pg3200.txt +impatience; pg3200.txt, pg100.txt +impatience? pg100.txt +impatient pg31100.txt, pg3200.txt, pg100.txt +impatient, pg3200.txt, pg100.txt +impatient. pg31100.txt, pg3200.txt, pg100.txt +impatient." pg3200.txt +impatiently pg31100.txt +impatiently, pg31100.txt +impatiently-- pg3200.txt +impatiently. pg31100.txt +impatiently: pg3200.txt +impawn'd pg100.txt +impeach pg100.txt +impeachment pg3200.txt +impede pg3200.txt +impeded pg3200.txt +impediment pg100.txt +impediment, pg3200.txt, pg100.txt +impediment. pg31100.txt +impediment; pg100.txt +impediments pg31100.txt, pg100.txt +impelled, pg31100.txt +impending pg3200.txt +impending. pg3200.txt +impending:] pg3200.txt +impenetrable pg31100.txt, pg3200.txt +imperative, pg3200.txt +imperative. 
pg3200.txt +imperative; pg3200.txt +imperceiverant pg100.txt +imperceptible pg3200.txt +imperceptibly pg3200.txt +imperfect pg100.txt +imperfect, pg100.txt +imperfect." pg3200.txt +imperfection pg31100.txt +imperfections pg3200.txt, pg100.txt +imperial pg3200.txt +imperial, pg100.txt +imperial: pg100.txt +imperialists, pg3200.txt +imperiled. pg3200.txt +imperiously-- pg3200.txt +imperiously? pg100.txt +imperishable pg3200.txt +impertinence pg31100.txt +impertinence, pg31100.txt, pg3200.txt +impertinence. pg31100.txt +impertinence; pg31100.txt +impertinence?" pg31100.txt +impertinent pg31100.txt, pg3200.txt +impertinent. pg100.txt +impertinent." pg31100.txt +impertinently pg31100.txt +impertinents pg3200.txt +impetuosity. pg100.txt +impetuous pg31100.txt +impetuously: pg3200.txt +impiety pg100.txt +impiety, pg100.txt +impiety; pg100.txt +impishly pg3200.txt +implacability pg31100.txt +implacable pg3200.txt +implacable. pg3200.txt +implement pg3200.txt +implements pg3200.txt +implicated pg3200.txt +implication pg3200.txt +implicitly pg31100.txt +implicitly, pg31100.txt +implied pg31100.txt, pg3200.txt, pg100.txt +implied, pg31100.txt, pg100.txt +implies pg3200.txt +implies, pg3200.txt, pg100.txt +implies. pg3200.txt +implies." pg31100.txt +implore pg3200.txt +implored pg3200.txt +imploring pg3200.txt +imploring, pg3200.txt +imploring. pg3200.txt +imploring; pg3200.txt +imply pg31100.txt, pg3200.txt +imply." pg31100.txt +impolite pg3200.txt +impoliteness. pg3200.txt +import pg3200.txt, pg100.txt +import, pg100.txt +import. pg31100.txt, pg3200.txt, pg100.txt +import: pg3200.txt, pg100.txt +import; pg3200.txt +import? pg100.txt +importance pg31100.txt, pg3200.txt +importance! pg31100.txt +importance, pg31100.txt, pg3200.txt, pg100.txt +importance--all pg3200.txt +importance--for pg3200.txt +importance. pg31100.txt, pg3200.txt +importance." pg31100.txt +important pg31100.txt, pg3200.txt +important, pg31100.txt, pg3200.txt +important--certainly pg3200.txt +important. pg31100.txt, pg3200.txt +important." pg3200.txt +important; pg31100.txt +importation pg3200.txt +importation. pg3200.txt +importations--have pg3200.txt +imported pg3200.txt, pg100.txt +importing pg31100.txt, pg3200.txt, pg100.txt +imports pg100.txt +importun'd pg100.txt +importunacy? pg100.txt +importunate pg100.txt +importune pg100.txt +importune, pg100.txt +importunes pg100.txt +importunity pg31100.txt +importunity. pg100.txt +importunity; pg100.txt +impos'd pg100.txt +impose pg31100.txt, pg3200.txt +impose, pg100.txt +imposed pg31100.txt, pg3200.txt +imposed. pg31100.txt, pg3200.txt +imposes pg3200.txt +imposing pg3200.txt +imposing, pg3200.txt +imposing--with pg3200.txt +imposing-stone--for pg3200.txt +imposition pg3200.txt +imposition, pg100.txt +impossibilities, pg100.txt +impossibilities. pg3200.txt, pg100.txt +impossibilities; pg3200.txt +impossibilities?" pg3200.txt +impossibility pg31100.txt, pg3200.txt +impossibility, pg31100.txt, pg3200.txt, pg100.txt +impossibility. pg3200.txt, pg100.txt +impossibility; pg3200.txt +impossible pg31100.txt, pg3200.txt, pg100.txt +impossible! pg3200.txt +impossible!" pg31100.txt, pg3200.txt +impossible, pg31100.txt, pg3200.txt, pg100.txt +impossible- pg100.txt +impossible. pg31100.txt, pg3200.txt, pg100.txt +impossible." pg31100.txt, pg3200.txt +impossible.' pg3200.txt +impossible; pg31100.txt, pg3200.txt, pg100.txt +impossible? pg3200.txt +impossibly pg3200.txt +impostor pg3200.txt, pg100.txt +impostor--has pg3200.txt +impostor. pg3200.txt +impostor." 
pg3200.txt +impostors pg3200.txt +impostors. pg3200.txt +impostors." pg3200.txt +imposts, pg3200.txt +imposture. pg3200.txt +impotence pg100.txt +impotence. pg3200.txt +impotent. pg3200.txt +impoverished. pg31100.txt +impoverishing pg31100.txt +impoverishment--and pg3200.txt +impracticable. pg31100.txt +imprecations pg3200.txt +impregnable, pg100.txt +impress pg31100.txt, pg3200.txt +impress. pg100.txt +impressed pg31100.txt, pg3200.txt +impressed, pg3200.txt, pg100.txt +impressed--so pg31100.txt +impressed. pg3200.txt +impresses pg3200.txt +impression pg31100.txt, pg3200.txt, pg100.txt +impression, pg31100.txt, pg3200.txt +impression. pg31100.txt, pg3200.txt +impressions pg3200.txt +impressions, pg3200.txt +impressions." pg31100.txt +impressive pg3200.txt +impressive, pg3200.txt +impressive. pg3200.txt +impressive: pg3200.txt +impressive; pg3200.txt +impressively, pg3200.txt +impressively-- pg3200.txt +impressively. pg3200.txt +impressively: pg3200.txt +impressively], pg3200.txt +impressiveness pg3200.txt +impressiveness, pg3200.txt +impressiveness-- pg3200.txt +impressiveness. pg3200.txt +impressure pg100.txt +imprint. pg3200.txt +imprinted pg3200.txt +imprison pg3200.txt +imprison'd pg100.txt +imprison'd, pg100.txt +imprison'd. pg100.txt +imprison't pg100.txt +imprisoned pg3200.txt, pg100.txt +imprisonment pg31100.txt, pg3200.txt, pg100.txt +imprisonment); pg3200.txt +imprisonment, pg100.txt +imprisonment. pg31100.txt, pg3200.txt, pg100.txt +imprisonment; pg100.txt +imprisonment? pg100.txt +imprisons pg3200.txt +improbable pg31100.txt +improbable. pg31100.txt +impromptu pg3200.txt +improper pg31100.txt, pg3200.txt +improper! pg31100.txt +improper, pg3200.txt +improper. pg31100.txt +improper." pg31100.txt +improperly, pg3200.txt +improperly. pg31100.txt +improperly." pg3200.txt +impropriety pg31100.txt +impropriety. pg3200.txt +improve pg31100.txt, pg3200.txt +improve--i pg3200.txt +improve. pg31100.txt, pg3200.txt +improve." pg3200.txt +improve?" pg3200.txt +improved pg31100.txt, pg3200.txt +improved, pg31100.txt +improved--and pg3200.txt +improved--he pg31100.txt +improved--want pg31100.txt +improved. pg31100.txt, pg3200.txt +improved; pg31100.txt, pg3200.txt +improvement pg31100.txt, pg3200.txt +improvement, pg31100.txt, pg3200.txt +improvement. pg31100.txt +improvement? pg3200.txt +improvement?" pg31100.txt +improvements pg31100.txt, pg3200.txt +improvements. pg3200.txt +improver pg31100.txt +improver, pg31100.txt +improves pg3200.txt +improvident pg100.txt +improvident, pg3200.txt +improving pg31100.txt, pg3200.txt +improving, pg3200.txt +improving. pg3200.txt +improvise pg3200.txt +imprudence pg31100.txt, pg3200.txt +imprudence. pg31100.txt +imprudence; pg31100.txt +imprudence? pg31100.txt +imprudencies pg31100.txt +imprudent pg31100.txt +imprudent! pg31100.txt +imprudent, pg31100.txt +imprudent; pg31100.txt +impudence pg31100.txt, pg3200.txt +impudence!" pg3200.txt +impudence, pg100.txt +impudent pg31100.txt, pg3200.txt, pg100.txt +impudent, pg3200.txt +impudently pg3200.txt +impulse pg31100.txt, pg3200.txt +impulse, pg31100.txt +impulse--no, pg3200.txt +impulse. pg3200.txt +impulse; pg3200.txt +impulses pg3200.txt +impulses, pg3200.txt +impulses. pg3200.txt +impulses? pg3200.txt +impunity, pg31100.txt +impunity. pg3200.txt +impunity." pg3200.txt +impure pg3200.txt +impure, pg3200.txt +impure. pg3200.txt +impurities?" pg3200.txt +impurity. pg3200.txt +imputation pg100.txt +imputation, pg100.txt +impute, pg100.txt +imputed pg31100.txt +in! pg3200.txt, pg100.txt +in!!!!!" 
pg3200.txt +in!" pg3200.txt +in!' pg3200.txt +in" pg3200.txt +in"; pg3200.txt +in's pg100.txt +in't pg100.txt +in't! pg100.txt +in't, pg100.txt +in't- pg100.txt +in't. pg100.txt +in't.- pg100.txt +in't; pg100.txt +in't? pg100.txt +in, pg31100.txt, pg3200.txt, pg100.txt +in," pg3200.txt +in- pg100.txt +in-- pg3200.txt +in--" pg3200.txt +in----!" pg3200.txt +in--and pg3200.txt +in--especially pg3200.txt +in--however, pg3200.txt +in--i pg3200.txt +in--see pg3200.txt +in-a-door, pg100.txt +in-betweens, pg31100.txt +in-margaret. pg100.txt +in. pg31100.txt, pg3200.txt, pg100.txt +in." pg31100.txt, pg3200.txt +in.' pg100.txt +in.'" pg3200.txt +in.) pg3200.txt +in....." pg3200.txt +in.[1] pg3200.txt +in: pg31100.txt, pg3200.txt, pg100.txt +in; pg31100.txt, pg3200.txt, pg100.txt +in? pg31100.txt, pg3200.txt, pg100.txt +in?" pg31100.txt, pg3200.txt +in?--did pg31100.txt +in] pg100.txt +in_, pg31100.txt +inability pg31100.txt, pg3200.txt +inaccessible, pg31100.txt, pg100.txt +inaccessible- pg100.txt +inaccessible. pg3200.txt +inaccuracies pg3200.txt +inaccurate pg31100.txt, pg3200.txt, pg100.txt +inaction. pg3200.txt +inactive pg31100.txt, pg3200.txt +inactivity, pg3200.txt +inadequate. pg3200.txt +inadmissibility pg31100.txt +inadvertence pg3200.txt +inadvertencies pg31100.txt +inalienable pg3200.txt +inane pg3200.txt +inanimate. pg3200.txt +inanition pg3200.txt +inanition, pg3200.txt +inanition; pg3200.txt +inappropriate pg3200.txt +inarticulately, pg3200.txt +inartistically, pg3200.txt +inasmuch pg3200.txt +inattention pg31100.txt +inattention, pg31100.txt +inattentive pg31100.txt +inaudibly, pg3200.txt +inaudibly: pg3200.txt +inauguration pg3200.txt +inc., pg100.txt +incalculable pg3200.txt +incalculable. pg31100.txt, pg3200.txt +incandescent pg3200.txt +incantation, pg3200.txt +incantations pg3200.txt +incantations, pg3200.txt +incantations; pg3200.txt +incapable pg31100.txt, pg3200.txt, pg100.txt +incapable, pg3200.txt +incapable? pg3200.txt +incardinate. pg100.txt +incarnadine, pg100.txt +incarnate. pg100.txt +incarnation pg3200.txt +incarnation, pg3200.txt +incarnations pg3200.txt +incarnations. pg3200.txt +incautiousness." pg31100.txt +incens'd. pg100.txt +incense pg100.txt +incensed pg100.txt +incensed. pg31100.txt +incentive pg3200.txt +incentive. pg3200.txt +incertain, pg100.txt +incertainty, pg100.txt +incessant pg31100.txt +incest. pg100.txt +inch pg31100.txt, pg3200.txt, pg100.txt +inch! pg3200.txt +inch, pg31100.txt, pg3200.txt, pg100.txt +inch. pg100.txt +inch." pg31100.txt +inch; pg3200.txt +inched pg3200.txt +inches pg3200.txt, pg100.txt +inches!" pg3200.txt +inches, pg31100.txt, pg3200.txt +inches. pg3200.txt, pg100.txt +inches; pg31100.txt +incident pg3200.txt +incident, pg3200.txt +incident. pg3200.txt +incident." pg3200.txt +incident: pg3200.txt +incident? pg3200.txt +incidentally pg3200.txt +incidents pg3200.txt +incidents. pg3200.txt +incipient pg3200.txt +incision pg100.txt +incision. pg100.txt +incite pg100.txt +incite, pg100.txt +incivilities.' pg3200.txt +incivility pg31100.txt +inclemency pg3200.txt +inclin'd, pg100.txt +inclin'd. pg100.txt +inclination pg31100.txt, pg3200.txt, pg100.txt +inclination, pg31100.txt +inclination--to pg3200.txt +inclination. pg31100.txt, pg100.txt +inclination." pg31100.txt +inclination; pg100.txt +inclinations pg31100.txt, pg3200.txt +inclinations, pg3200.txt +incline pg31100.txt, pg3200.txt, pg100.txt +incline, pg3200.txt, pg100.txt +incline. pg3200.txt +incline; pg100.txt +inclined pg31100.txt, pg3200.txt +inclined. 
pg3200.txt, pg100.txt +inclines. pg100.txt +inclips pg100.txt +inclosed pg3200.txt +include pg3200.txt +included pg31100.txt, pg3200.txt, pg100.txt +included--and pg3200.txt +included. pg3200.txt, pg100.txt +included." pg31100.txt +includes pg3200.txt, pg100.txt +including pg31100.txt, pg3200.txt, pg100.txt +inclusive pg100.txt +inclusive. pg3200.txt +incognito pg3200.txt +incoherences. pg3200.txt +incoherent pg31100.txt, pg3200.txt +incoherently pg31100.txt +income pg31100.txt, pg3200.txt +income, pg31100.txt, pg3200.txt +income," pg31100.txt +income. pg31100.txt, pg3200.txt +income." pg31100.txt, pg3200.txt +income.--however, pg31100.txt +income.] pg3200.txt +income; pg31100.txt, pg3200.txt +income?" pg3200.txt +incomes pg31100.txt +incommode pg31100.txt +incommoded pg31100.txt, pg3200.txt +incomparable pg3200.txt +incomparable; pg100.txt +incompatible pg31100.txt +incompetent pg3200.txt +incomplete, pg100.txt +incomplete. pg31100.txt, pg3200.txt +incomprehensible pg31100.txt, pg3200.txt +incomprehensible, pg3200.txt +incomprehensible. pg31100.txt +inconceivable pg31100.txt +inconceivable, pg31100.txt +inconceivable--it pg3200.txt +inconceivable. pg31100.txt, pg3200.txt +incongruities pg3200.txt +incongruity pg3200.txt +incongruous pg3200.txt +incongruous. pg3200.txt +inconsequence, pg3200.txt +inconsequential pg3200.txt +inconsiderable pg31100.txt +inconsiderable, pg31100.txt +inconsiderate pg100.txt +inconsistencies pg3200.txt +inconsistency pg3200.txt +inconsistency, pg3200.txt +inconsistent pg3200.txt +inconsistent, pg31100.txt, pg3200.txt +inconsistent. pg3200.txt +inconsistently pg3200.txt +inconspicuous pg3200.txt +inconspicuous. pg3200.txt +inconstancy pg31100.txt, pg100.txt +inconstancy! pg100.txt +inconstancy- pg100.txt +inconstancy. pg31100.txt, pg100.txt +inconstant, pg100.txt +inconstant." pg31100.txt +incontinency pg100.txt +incontinency. pg100.txt +incontinency; pg100.txt +incontinent. pg100.txt +inconvenience pg31100.txt, pg3200.txt +inconvenience, pg3200.txt, pg100.txt +inconvenience--it pg3200.txt +inconvenience. pg31100.txt, pg3200.txt +inconvenience." pg31100.txt, pg3200.txt +inconvenience; pg31100.txt +inconveniences pg31100.txt, pg100.txt +inconveniences.' pg3200.txt +inconveniences; pg3200.txt +inconvenient pg31100.txt +inconvenient. pg3200.txt +incorporate pg100.txt +incorporate, pg100.txt +incorporated pg3200.txt +incorporating pg3200.txt +incorporation. pg3200.txt +incorporatorship, pg3200.txt +incorrect pg3200.txt +incorruptible pg3200.txt +incorruptibles!" pg3200.txt +increas'd pg100.txt +increase pg31100.txt, pg3200.txt, pg100.txt +increase, pg31100.txt, pg100.txt +increase. pg31100.txt, pg100.txt +increase; pg100.txt +increased pg31100.txt, pg3200.txt +increased, pg31100.txt, pg3200.txt +increased. pg31100.txt, pg3200.txt +increased." pg3200.txt +increased; pg31100.txt, pg3200.txt +increases pg3200.txt +increases--a pg3200.txt +increases." pg31100.txt +increaseth. pg100.txt +increasing pg31100.txt, pg3200.txt +increasing, pg31100.txt, pg100.txt +increasing." pg31100.txt +incredibilities; pg3200.txt +incredible pg31100.txt, pg3200.txt +incredible!--but pg3200.txt +incredible, pg3200.txt +incredible. pg3200.txt +incredulity. pg31100.txt +incredulous, pg31100.txt +incredulous. pg3200.txt +incrusted pg3200.txt +incumbent pg31100.txt, pg3200.txt +incumbent, pg31100.txt +incumbers pg3200.txt +incumbrance pg3200.txt +incumbrances pg3200.txt +incur pg100.txt +incurable- pg100.txt +incurable?" 
pg3200.txt +incurr'd pg100.txt +incurred, pg31100.txt +inde pg100.txt +inde, pg3200.txt, pg100.txt +indebted pg31100.txt +indebted, pg31100.txt +indebtedness pg3200.txt +indebtedness, pg3200.txt +indecencies. pg3200.txt +indecent pg3200.txt +indecision pg31100.txt +indecision, pg31100.txt +indecision. pg31100.txt +indecorous, pg31100.txt +indecorous." pg31100.txt +indecorum pg31100.txt +indeed pg31100.txt, pg3200.txt, pg100.txt +indeed! pg31100.txt, pg3200.txt, pg100.txt +indeed!" pg31100.txt, pg3200.txt +indeed!'" pg31100.txt +indeed!- pg100.txt +indeed!--as pg31100.txt +indeed!--but pg31100.txt +indeed!--this pg31100.txt +indeed, pg31100.txt, pg3200.txt, pg100.txt +indeed,' pg3200.txt +indeed- pg100.txt +indeed-- pg3200.txt +indeed--and pg31100.txt, pg3200.txt +indeed. pg31100.txt, pg3200.txt, pg100.txt +indeed." pg31100.txt, pg3200.txt +indeed.--"poor pg31100.txt +indeed; pg31100.txt, pg3200.txt, pg100.txt +indeed? pg100.txt +indeed?" pg31100.txt +indeed?"--but pg31100.txt +indeedy!" pg3200.txt +indefatigable pg31100.txt +indefinable pg3200.txt +indefinable, pg31100.txt +indefinite pg31100.txt, pg3200.txt +indefinite. pg31100.txt +indefinitely, pg3200.txt +indefinitely. pg3200.txt +indelicacy pg31100.txt +indelicacy. pg3200.txt +indelicate pg31100.txt, pg3200.txt +indelicate. pg3200.txt +indent pg100.txt +indented pg3200.txt +independant pg31100.txt +independence pg31100.txt, pg3200.txt +independence, pg31100.txt, pg3200.txt +independence. pg31100.txt, pg3200.txt +independence." pg31100.txt +independence; pg3200.txt +independent pg31100.txt, pg3200.txt +independent, pg31100.txt +independent. pg3200.txt +independent." pg31100.txt +independently pg31100.txt +independently, pg31100.txt +independently." pg3200.txt +independents pg3200.txt +indescribable pg3200.txt +indescribable. pg3200.txt +indestructible pg3200.txt +indestructible. pg3200.txt +indestructible." pg3200.txt +indestructibly pg3200.txt +index, pg3200.txt +index? pg100.txt +india pg3200.txt +india! pg100.txt +india" pg3200.txt +india, pg3200.txt, pg100.txt +india," pg3200.txt +india--a pg3200.txt +india--how pg3200.txt +india. pg3200.txt, pg100.txt +india; pg3200.txt +india? pg3200.txt +indian pg3200.txt +indian's pg3200.txt +indian's, pg3200.txt +indian, pg3200.txt +indian-like, pg100.txt +indian. pg3200.txt +indian; pg3200.txt +indian? pg3200.txt +indian?" pg3200.txt +indiana pg3200.txt +indiana. pg3200.txt +indians pg3200.txt +indians, pg3200.txt +indians--so pg3200.txt +indians. pg3200.txt +indians." pg3200.txt +indicate pg31100.txt, pg3200.txt +indicated pg3200.txt +indicated, pg3200.txt +indicated. pg3200.txt +indicated: pg3200.txt +indicated; pg3200.txt +indicates pg3200.txt +indicating pg31100.txt, pg3200.txt, pg100.txt +indication pg3200.txt +indications pg3200.txt +indications, pg3200.txt +indications--" pg3200.txt +indictment. pg100.txt +indies) pg31100.txt +indies. pg31100.txt +indies." pg31100.txt +indies; pg31100.txt +indies? pg100.txt +indifference pg31100.txt, pg3200.txt +indifference!" pg31100.txt +indifference, pg31100.txt, pg3200.txt +indifference. pg31100.txt +indifference." pg31100.txt, pg3200.txt +indifference: pg3200.txt +indifference; pg31100.txt +indifference?" pg31100.txt +indifferency, pg100.txt +indifferent pg31100.txt, pg3200.txt, pg100.txt +indifferent, pg31100.txt, pg3200.txt, pg100.txt +indifferent--absolutely, pg3200.txt +indifferent--but pg31100.txt +indifferent. 
pg31100.txt, pg3200.txt, pg100.txt +indifferent: pg3200.txt +indifferent; pg31100.txt, pg3200.txt +indifferently pg3200.txt +indifferently, pg3200.txt +indifferently. pg3200.txt, pg100.txt +indifferently: pg3200.txt +indigence pg31100.txt +indigence." pg31100.txt +indigent pg3200.txt +indigest pg100.txt +indigest, pg100.txt +indigestion, pg3200.txt +indigestion. pg3200.txt +indignant pg3200.txt +indignant! pg3200.txt +indignant, pg3200.txt +indignant--indignant pg3200.txt +indignant. pg3200.txt +indignantly-- pg3200.txt +indignantly. pg31100.txt +indignantly: pg3200.txt +indignation pg31100.txt, pg3200.txt, pg100.txt +indignation! pg3200.txt +indignation, pg31100.txt, pg3200.txt +indignation-- pg31100.txt +indignation. pg31100.txt, pg3200.txt, pg100.txt +indignation: pg3200.txt +indignities, pg100.txt +indignities. pg100.txt +indignity pg3200.txt +indignity, pg100.txt +indignity. pg100.txt +indigo pg3200.txt +indirect, pg100.txt +indiscreet pg3200.txt, pg100.txt +indiscreet, pg31100.txt +indiscreeter. pg3200.txt +indiscretion pg3200.txt +indiscretion, pg3200.txt +indiscretion. pg31100.txt, pg3200.txt +indispensable pg3200.txt +indispensable." pg31100.txt +indisposed pg31100.txt +indisposition pg31100.txt +indisposition, pg31100.txt, pg3200.txt +indisposition. pg31100.txt +indisputable pg31100.txt +indisputable; pg3200.txt +indisputably pg31100.txt +indisputably, pg3200.txt +indissoluble pg31100.txt +indistinct pg3200.txt +indistinct, pg100.txt +indistinct. pg31100.txt +indited pg100.txt +individual pg31100.txt, pg3200.txt +individual's pg3200.txt +individual, pg31100.txt, pg3200.txt +individual--provided pg3200.txt +individual. pg31100.txt, pg3200.txt +individual; pg3200.txt +individuality pg3200.txt +individuals pg3200.txt +individuals, pg3200.txt +individuals. pg3200.txt +indolence, pg3200.txt +indolent pg3200.txt +indolent, pg3200.txt +indolent. pg31100.txt +indolently; pg3200.txt +indoors, pg3200.txt +indorse pg3200.txt +indorsed pg3200.txt +indorsement." pg3200.txt +indorsers, pg3200.txt +indubitable. pg31100.txt +induc'd pg100.txt +induce pg31100.txt, pg3200.txt, pg100.txt +induced pg31100.txt, pg3200.txt +induced, pg3200.txt +inducement, pg31100.txt +inducement. pg100.txt +induces pg31100.txt +inducing pg3200.txt +induction pg100.txt +induction. pg100.txt +indued pg100.txt +indues pg100.txt +indulge pg31100.txt, pg3200.txt +indulge! pg31100.txt +indulged pg31100.txt, pg3200.txt +indulged. pg31100.txt +indulgence pg31100.txt, pg3200.txt, pg100.txt +indulgence, pg31100.txt +indulgence--they pg3200.txt +indulgence." pg31100.txt +indulgent pg31100.txt +indulging pg31100.txt, pg3200.txt +indurated pg3200.txt +industrial pg3200.txt +industrial-university!--read-first-and-second-time-considered-in- pg3200.txt +industries pg3200.txt +industries. pg3200.txt +industrious pg3200.txt +industrious. pg3200.txt +industrious; pg3200.txt +industriously pg100.txt +industry pg3200.txt +industry, pg100.txt +industry. pg3200.txt +industry; pg100.txt +inebriates. pg3200.txt +ineffable pg3200.txt +ineffaceable. pg3200.txt +ineffectual pg3200.txt +ineffectual, pg3200.txt +inelegance pg31100.txt +ineligible pg31100.txt +ineligible." pg31100.txt +ineligible; pg3200.txt +inequalities pg3200.txt +inequality pg31100.txt +ineradicable pg3200.txt +inertia--these pg3200.txt +inestimable. pg31100.txt, pg3200.txt +inevitable pg31100.txt, pg3200.txt +inevitable. pg31100.txt +inevitably. pg3200.txt +inexcusably pg31100.txt +inexhaustible. pg3200.txt +inexorable- pg100.txt +inexorably. 
[Generated inverted-index output elided: a long alphabetical run of added "+token<TAB>file-list" lines, from "inexpensive," through "knife."", flattened here into single lines when the diff was extracted. Each added line maps one lower-cased, whitespace-delimited token (punctuation retained, so "infamous!'" and "infamous," are distinct keys) to the comma-separated list of input files that contain it: pg31100.txt, pg3200.txt, and pg100.txt. Representative entries:

+infallible<TAB>pg3200.txt, pg100.txt
+inheritance.<TAB>pg31100.txt, pg3200.txt, pg100.txt
+jealousy<TAB>pg31100.txt, pg3200.txt, pg100.txt

The key/value separator was lost in the flattening; Hadoop's TextOutputFormat default is a tab.]
pg3200.txt +knife; pg3200.txt +knife? pg100.txt +knife?" pg3200.txt +knife] pg100.txt +knight pg3200.txt, pg100.txt +knight! pg3200.txt, pg100.txt +knight'- pg100.txt +knight's pg3200.txt, pg100.txt +knight, pg3200.txt, pg100.txt +knight," pg3200.txt +knight-errantry pg31100.txt, pg3200.txt +knight. pg3200.txt, pg100.txt +knight: pg100.txt +knight; pg3200.txt, pg100.txt +knight? pg100.txt +knight?" pg3200.txt +knight] pg100.txt +knighted. pg100.txt +knighthood pg3200.txt, pg100.txt +knightley pg31100.txt +knightley!--i pg31100.txt +knightley's pg31100.txt +knightley's, pg31100.txt +knightley, pg31100.txt +knightley--or pg31100.txt +knightley--what pg31100.txt +knightley. pg31100.txt +knightley." pg31100.txt +knightley.' pg31100.txt +knightley.--"it pg31100.txt +knightley.--neither pg31100.txt +knightley; pg31100.txt +knightley?" pg31100.txt +knightleys, pg31100.txt +knightleys; pg31100.txt +knights pg3200.txt, pg100.txt +knights, pg3200.txt, pg100.txt +knights. pg3200.txt, pg100.txt +knights." pg3200.txt +knights: pg3200.txt +knights? pg100.txt +knights?" pg3200.txt +knightsbridge, pg3200.txt +knit pg100.txt +knit, pg31100.txt, pg100.txt +knit. pg100.txt +knit.' pg100.txt +knit; pg100.txt +knitting pg31100.txt, pg3200.txt +knitting-needles pg3200.txt +knitting. pg3200.txt +knitting; pg3200.txt +knives pg31100.txt, pg100.txt +knives, pg3200.txt, pg100.txt +knives. pg3200.txt +knives: pg100.txt +knob pg3200.txt +knob! pg3200.txt +knob, pg3200.txt +knobs pg3200.txt +knobs- pg3200.txt +knock pg31100.txt, pg3200.txt, pg100.txt +knock! pg100.txt +knock'd pg100.txt +knock, pg3200.txt +knock-kneed pg3200.txt +knock. pg100.txt +knock? pg100.txt +knocked pg31100.txt, pg3200.txt +knocking pg31100.txt, pg3200.txt, pg100.txt +knocking! pg3200.txt +knocking. pg100.txt +knocking? pg100.txt +knocks pg3200.txt, pg100.txt +knocks! pg100.txt +knocks. pg3200.txt, pg100.txt +knocks? pg100.txt +knocks] pg100.txt +knod pg3200.txt +knoll pg31100.txt, pg3200.txt +knoll'd. pg100.txt +knoll." pg31100.txt +knot pg3200.txt +knot, pg100.txt +knot. pg3200.txt, pg100.txt +knot; pg100.txt +knots pg3200.txt +knots, pg100.txt +knots- pg100.txt +knotted pg3200.txt +knotty pg3200.txt +know!" pg3200.txt +know!' pg3200.txt +know" pg3200.txt +know'st pg100.txt +know'st- pg100.txt +know'st. pg100.txt +know'st; pg100.txt +know'st? pg100.txt +know't, pg100.txt +know't. pg100.txt +know): pg3200.txt +know, pg31100.txt, pg3200.txt, pg100.txt +know," pg3200.txt +know- pg100.txt +know-- pg3200.txt +know--" pg3200.txt +know----" pg3200.txt +know--and pg3200.txt +know--but pg3200.txt +know--can't pg3200.txt +know--in pg31100.txt +know--maybe pg3200.txt +know--no, pg31100.txt +know--plain pg3200.txt +know--repudiate pg3200.txt +know--suppose pg3200.txt +know--that pg3200.txt +know--the pg3200.txt +know-nothingism. pg3200.txt +know. pg31100.txt, pg3200.txt, pg100.txt +know." pg31100.txt, pg3200.txt +know.' pg3200.txt, pg100.txt +know.] pg3200.txt +know: pg3200.txt, pg100.txt +know; pg31100.txt, pg3200.txt, pg100.txt +know? pg31100.txt, pg3200.txt, pg100.txt +know?" pg31100.txt, pg3200.txt +know?' pg3200.txt +know?--what pg3200.txt +knowed pg3200.txt +knowed. pg3200.txt +knower; pg100.txt +knowest pg3200.txt, pg100.txt +knowest, pg100.txt +knowest. pg100.txt +knowest." pg3200.txt +knoweth pg3200.txt +knoweth." pg3200.txt +knowing pg31100.txt, pg3200.txt, pg100.txt +knowing, pg31100.txt, pg100.txt +knowing. pg31100.txt, pg3200.txt +knowing." pg31100.txt +knowing; pg100.txt +knowingly pg3200.txt +knowingly. pg100.txt +knowings. 
pg100.txt +knowledge pg31100.txt, pg3200.txt, pg100.txt +knowledge! pg100.txt +knowledge, pg3200.txt, pg100.txt +knowledge," pg3200.txt +knowledge- pg100.txt +knowledge--detailed pg3200.txt +knowledge. pg31100.txt, pg3200.txt, pg100.txt +knowledge." pg3200.txt +knowledge: pg100.txt +knowledge; pg31100.txt +knowledge? pg100.txt +known pg31100.txt, pg3200.txt, pg100.txt +known! pg100.txt +known, pg31100.txt, pg3200.txt, pg100.txt +known. pg31100.txt, pg3200.txt, pg100.txt +known." pg31100.txt, pg3200.txt +known: pg100.txt +known; pg31100.txt, pg3200.txt, pg100.txt +known? pg100.txt +known?" pg3200.txt +knows pg31100.txt, pg3200.txt, pg100.txt +knows!- pg100.txt +knows, pg31100.txt, pg3200.txt, pg100.txt +knows-- pg3200.txt +knows. pg3200.txt, pg100.txt +knows." pg3200.txt +knows? pg3200.txt, pg100.txt +knows?--somewhere pg3200.txt +knuckle-one, pg3200.txt +knuckles pg3200.txt +knuckles. pg3200.txt +kodak'd pg3200.txt +kodaking--and pg3200.txt +koenigsstuhl pg3200.txt +kohala, pg3200.txt +kohala.' pg3200.txt +komm! pg3200.txt +kommen pg3200.txt +kommen! pg3200.txt +kona, pg3200.txt +kondoparinga pg3200.txt +kongorong pg3200.txt +konnen pg3200.txt +konnen? pg3200.txt +koolywurtie pg3200.txt +koorbash pg3200.txt +kooringa pg3200.txt +kopek, pg3200.txt +koppio pg3200.txt +kornerstrasse, pg3200.txt +kossuth pg3200.txt +kossuth. pg3200.txt +kotdee pg3200.txt +kotlugja, pg3200.txt +kraut pg3200.txt +krishni, pg3200.txt +krout: pg3200.txt +kruger's pg3200.txt +kruger's, pg3200.txt +kruger, pg3200.txt +kruger-- pg3200.txt +krugersdorp pg3200.txt +kuitpo pg3200.txt +kumen, pg3200.txt +kursaal--whatever pg3200.txt +ky. pg3200.txt +kydd; pg3200.txt +kyer. pg3200.txt +kympton?" pg31100.txt +l'emplit pg3200.txt +l'emporta pg3200.txt +l'envoy, pg100.txt +l'envoy. pg100.txt +l'envoy; pg100.txt +l'etoile. pg3200.txt +l'honneur." pg3200.txt +l'obligeance pg3200.txt +l'un pg3200.txt +l'universite, pg3200.txt +l--" pg3200.txt +l. pg31100.txt, pg3200.txt, pg100.txt +l., pg31100.txt, pg3200.txt +l.] pg3200.txt +l.e." pg3200.txt +l11,000." pg3200.txt +l20 pg3200.txt +l20. pg3200.txt +l240 pg3200.txt +l25 pg3200.txt +l5 pg3200.txt +l500,000 pg3200.txt +la pg3200.txt, pg100.txt +la! pg31100.txt, pg100.txt +la, pg3200.txt +la. pg100.txt +la? pg100.txt +labboard!" pg3200.txt +label pg3200.txt +label. pg3200.txt +label." pg3200.txt +labeled pg3200.txt +labeled: pg3200.txt +labeling pg3200.txt +labels pg3200.txt +labels, pg3200.txt +labienus- pg100.txt +labor pg31100.txt, pg3200.txt, pg100.txt +labor, pg3200.txt +labor- pg100.txt +labor-union, pg3200.txt +labor. pg3200.txt, pg100.txt +labor." pg3200.txt +laboratories, pg3200.txt +laboratory pg3200.txt +laboratory. pg3200.txt +laboratory?" pg3200.txt +labored pg3200.txt +laborer, pg3200.txt +laborers pg3200.txt +laboring pg3200.txt +laborious pg3200.txt +laboriously pg3200.txt +laboriously, pg3200.txt +laboriously: pg3200.txt +labors pg3200.txt +labors, pg3200.txt, pg100.txt +labors. pg3200.txt +labors." pg3200.txt +labour pg31100.txt, pg100.txt +labour'd pg100.txt +labour'd, pg100.txt +labour, pg100.txt +labour. pg100.txt +labour; pg100.txt +labour? pg100.txt +laboured pg100.txt +labourers, pg31100.txt +labourers; pg31100.txt +labours pg31100.txt +labours, pg100.txt +labours. pg100.txt +labyrinth pg100.txt +labyrinth: pg100.txt +lac'd pg100.txt +lac'd; pg100.txt +lace!--now pg31100.txt +lace-work pg3200.txt +lace; pg3200.txt +laced pg3200.txt +lacerate pg3200.txt +lacerated pg3200.txt +lacerates. pg3200.txt +laces. pg100.txt +lacey pg31100.txt +lacey. 
pg31100.txt +lacey." pg31100.txt +lacies- pg100.txt +lacing pg31100.txt +lack pg3200.txt, pg100.txt +lack'st. pg100.txt +lack, pg100.txt +lack. pg3200.txt, pg100.txt +lack?" pg3200.txt +lacked pg3200.txt +lacked, pg3200.txt +lacked. pg3200.txt +lackey, pg100.txt +lackey. pg100.txt +lackeys, pg3200.txt +lackeys. pg3200.txt +lacking pg3200.txt, pg100.txt +lacking, pg3200.txt +lacking. pg3200.txt +lackland pg3200.txt +lackland's pg3200.txt +lackland?" pg3200.txt +lacks pg3200.txt, pg100.txt +laconia; pg31100.txt +lad pg3200.txt, pg100.txt +lad! pg3200.txt, pg100.txt +lad!" pg3200.txt +lad's pg3200.txt +lad, pg3200.txt, pg100.txt +lad. pg3200.txt, pg100.txt +lad." pg3200.txt +lad? pg100.txt +lad?" pg3200.txt +ladder pg3200.txt, pg100.txt +ladder, pg3200.txt, pg100.txt +ladder. pg3200.txt, pg100.txt +ladder." pg3200.txt +ladder; pg3200.txt +ladders pg3200.txt +ladders, pg3200.txt +laden pg3200.txt +laden. pg3200.txt +ladies pg31100.txt, pg3200.txt, pg100.txt +ladies' pg31100.txt, pg3200.txt +ladies, pg31100.txt, pg3200.txt, pg100.txt +ladies--generally pg3200.txt +ladies. pg31100.txt, pg3200.txt, pg100.txt +ladies." pg31100.txt, pg3200.txt +ladies.' pg3200.txt +ladies; pg3200.txt, pg100.txt +ladies? pg100.txt +ladies?" pg3200.txt +ladri pg3200.txt +lads pg3200.txt, pg100.txt +lads, pg3200.txt, pg100.txt +lads. pg100.txt +lads." pg3200.txt +ladvenue, pg3200.txt +lady pg31100.txt, pg3200.txt, pg100.txt +lady! pg31100.txt, pg100.txt +lady' pg100.txt +lady's pg31100.txt, pg3200.txt, pg100.txt +lady's, pg31100.txt, pg100.txt +lady's. pg100.txt +lady, pg31100.txt, pg3200.txt, pg100.txt +lady- pg100.txt +lady-- pg31100.txt, pg3200.txt +lady--it pg3200.txt +lady--nothing pg31100.txt +lady-bug pg3200.txt +lady-comers. pg3200.txt +lady. pg31100.txt, pg3200.txt, pg100.txt +lady." pg31100.txt, pg3200.txt +lady.' pg3200.txt +lady: pg100.txt +lady; pg31100.txt, pg100.txt +lady? pg100.txt +lady?" pg31100.txt, pg3200.txt +lady?"... pg31100.txt +lady] pg100.txt +ladybird! pg100.txt +ladyship pg31100.txt, pg3200.txt, pg100.txt +ladyship! pg100.txt +ladyship's pg31100.txt, pg100.txt +ladyship, pg31100.txt +ladyship," pg31100.txt +ladyship. pg31100.txt, pg100.txt +ladyship? pg100.txt +laertes pg100.txt +laertes, pg100.txt +laertes. pg100.txt +laertes? pg100.txt +laertes]. pg100.txt +lafayette." pg3200.txt +lafayettes, pg3200.txt +lafeu pg100.txt +lafeu, pg100.txt +lafeu? pg100.txt +lafeu] pg100.txt +laff pg3200.txt +laffan--old, pg3200.txt +laffan. pg3200.txt +lagging pg3200.txt +lagniappe.' pg3200.txt +lahaina pg3200.txt +laid pg31100.txt, pg3200.txt, pg100.txt +laid, pg100.txt +laid. pg100.txt +laid; pg100.txt +lain pg3200.txt +lain, pg3200.txt +lais, pg3200.txt +laity, pg31100.txt +lake pg31100.txt, pg3200.txt, pg100.txt +lake! pg3200.txt +lake, pg3200.txt, pg100.txt +lake," pg3200.txt +lake--and pg3200.txt +lake--therefore pg3200.txt +lake. pg3200.txt +lake." pg3200.txt +lake; pg3200.txt, pg100.txt +lake?" pg3200.txt +lakes pg31100.txt, pg3200.txt +lakes, pg3200.txt, pg100.txt +lakes." pg31100.txt +lakes; pg3200.txt +lakeside pg3200.txt +lalor pg3200.txt +lam pg3200.txt +lamanites, pg3200.txt +lamb pg3200.txt, pg100.txt +lamb! pg100.txt +lamb, pg3200.txt, pg100.txt +lamb. pg100.txt +lamb; pg100.txt +lambrequins; pg3200.txt +lambs pg3200.txt, pg100.txt +lambs! pg100.txt +lambs. pg100.txt +lambton, pg31100.txt +lambton?" pg31100.txt +lame pg3200.txt +lame, pg3200.txt, pg100.txt +lame,' pg3200.txt +lame. pg3200.txt +lame; pg31100.txt, pg3200.txt +lame? pg100.txt +lame?" pg3200.txt +lamely pg3200.txt +lameness! 
pg100.txt +lament pg31100.txt, pg3200.txt, pg100.txt +lament! pg100.txt +lament'st. pg100.txt +lament, pg31100.txt, pg100.txt +lament. pg100.txt +lament; pg100.txt +lamentable pg100.txt +lamentably. pg100.txt +lamentation pg100.txt +lamentation, pg100.txt +lamentation. pg100.txt +lamentation; pg100.txt +lamentations pg31100.txt +lamentations, pg3200.txt +lamentations. pg3200.txt +lamentations." pg31100.txt +lamentations._] pg31100.txt +lamentations? pg3200.txt +lamented pg3200.txt, pg100.txt +lamenting, pg31100.txt, pg3200.txt +lamenting. pg3200.txt +laments pg100.txt +laments, pg100.txt +laments; pg100.txt +laming pg100.txt +lammastide? pg100.txt +lamound. pg100.txt +lamp pg31100.txt, pg3200.txt +lamp, pg3200.txt, pg100.txt +lamp-post pg3200.txt +lamp-posts. pg3200.txt +lamp. pg31100.txt, pg3200.txt, pg100.txt +lamp." pg3200.txt +lamp: pg3200.txt +lamp? pg3200.txt +lamp?" pg3200.txt +lamps pg3200.txt +lamps, pg3200.txt +lamps. pg3200.txt +lampton pg3200.txt +lan! pg3200.txt +lan', pg3200.txt +lan--napoleon--now pg3200.txt +lancaster pg100.txt +lancaster! pg100.txt +lancaster!'] pg100.txt +lancaster'; pg100.txt +lancaster, pg100.txt +lancaster. pg100.txt +lancaster; pg100.txt +lancaster? pg100.txt +lancastrians, pg31100.txt +lance pg3200.txt, pg100.txt +lance!" pg3200.txt +lance, pg100.txt +lance-point pg3200.txt +lance. pg100.txt +lance." pg3200.txt +lance: pg100.txt +lance; pg100.txt +lances pg3200.txt +land pg31100.txt, pg3200.txt, pg100.txt +land! pg3200.txt, pg100.txt +land!" pg3200.txt +land'; pg100.txt +land) pg3200.txt +land), pg3200.txt +land, pg31100.txt, pg3200.txt, pg100.txt +land,) pg3200.txt +land- pg100.txt +land--" pg3200.txt +land----" pg3200.txt +land--we pg3200.txt +land-boom. pg3200.txt +land-slides pg3200.txt +land. pg3200.txt, pg100.txt +land." pg3200.txt +land.' pg31100.txt +land: pg100.txt +land; pg3200.txt, pg100.txt +land? pg3200.txt, pg100.txt +land?" pg3200.txt +landaulette; pg31100.txt +landed pg3200.txt, pg100.txt +landed, pg3200.txt, pg100.txt +landed. pg3200.txt, pg100.txt +landed." pg3200.txt +landed: pg100.txt +landholder, pg31100.txt +landing pg3200.txt +landing, pg3200.txt +landing-place pg31100.txt +landing. pg3200.txt +landing." pg3200.txt +landing: pg3200.txt +landing?" pg3200.txt +landings, pg3200.txt +landlady pg3200.txt +landlady, pg3200.txt +landlady; pg3200.txt +landlord pg3200.txt +landlord" pg3200.txt +landlord's pg3200.txt +landlord, pg3200.txt +landlord-- pg3200.txt +landlord. pg3200.txt, pg100.txt +landmark pg3200.txt +landmark, pg3200.txt +landmark. pg3200.txt +landmarks pg3200.txt +lands pg3200.txt, pg100.txt +lands, pg3200.txt, pg100.txt +lands. pg3200.txt, pg100.txt +lands; pg3200.txt, pg100.txt +landscape pg31100.txt, pg3200.txt +landscape, pg3200.txt +landscape--abroad pg3200.txt +landscape-painting. pg3200.txt +landscape. pg31100.txt, pg3200.txt +landscape: pg3200.txt +landscapes, pg3200.txt +landseer's pg3200.txt +landseer. pg3200.txt +landslides pg3200.txt +landslip pg3200.txt +landsmannin.... pg3200.txt +lane pg31100.txt, pg3200.txt, pg100.txt +lane's pg100.txt +lane, pg31100.txt, pg3200.txt, pg100.txt +lane. pg31100.txt +lane." pg31100.txt, pg3200.txt +lane.' pg3200.txt +lane; pg3200.txt +lane? pg100.txt +lanes pg31100.txt, pg3200.txt, pg100.txt +lanes, pg3200.txt, pg100.txt +lang. pg3200.txt +langage. pg100.txt +langdon pg3200.txt +langdon's pg3200.txt +langdon, pg3200.txt +langdon. pg3200.txt +langeman, pg3200.txt +langford pg31100.txt +langford! pg31100.txt +langford. 
pg31100.txt +langham pg3200.txt +langland pg3200.txt +langomarganbl----" pg3200.txt +language pg31100.txt, pg3200.txt, pg100.txt +language! pg100.txt +language"; pg3200.txt +language, pg31100.txt, pg3200.txt, pg100.txt +language--best pg3200.txt +language--why pg3200.txt +language. pg31100.txt, pg3200.txt, pg100.txt +language." pg31100.txt, pg3200.txt +language: pg3200.txt +language; pg3200.txt +language?" pg3200.txt +language] pg3200.txt +languages pg3200.txt +languages, pg3200.txt, pg100.txt +languages. pg3200.txt +languages." pg3200.txt +languid pg31100.txt, pg3200.txt +languidly pg3200.txt +languidly: pg3200.txt +languish pg3200.txt, pg100.txt +languish. pg100.txt +languish? pg100.txt +languishing pg31100.txt, pg3200.txt +languishment pg100.txt +languor pg31100.txt, pg3200.txt +languor, pg31100.txt +lanigan. pg3200.txt +lansdown pg31100.txt +lantern pg3200.txt +lantern! pg3200.txt +lantern!" pg3200.txt +lantern, pg3200.txt +lantern--" pg3200.txt +lantern--do pg31100.txt +lantern--it pg3200.txt +lantern-jawed, pg3200.txt +lantern. pg3200.txt +lantern; pg100.txt +lanterns pg3200.txt +lanterns, pg3200.txt +lanthorn pg100.txt +laocoon pg3200.txt +laocoon. pg3200.txt +laocoon; pg3200.txt +lap pg3200.txt, pg100.txt +lap, pg31100.txt, pg3200.txt, pg100.txt +lap-dog pg3200.txt +lap-robe pg3200.txt +lap. pg3200.txt, pg100.txt +lap; pg3200.txt +lap? pg100.txt +lapel pg3200.txt +lapels pg3200.txt +lapis. pg100.txt +laplace pg3200.txt +lapp'd pg100.txt +lapping pg3200.txt +laprobe." pg3200.txt +laps, pg3200.txt, pg100.txt +lapse pg3200.txt, pg100.txt +lapse, pg3200.txt +lapse." pg3200.txt +lapsed pg3200.txt +larboard! pg3200.txt +larcenies. pg3200.txt +larceny pg3200.txt +larded pg100.txt +larder.] pg100.txt +large pg31100.txt, pg3200.txt, pg100.txt +large, pg31100.txt, pg3200.txt, pg100.txt +large," pg31100.txt +large- pg100.txt +large. pg3200.txt, pg100.txt +large." pg31100.txt, pg3200.txt +large; pg3200.txt +largely pg31100.txt, pg3200.txt +larger pg31100.txt, pg3200.txt +larger. pg3200.txt +larger." pg3200.txt +larger; pg3200.txt +largess pg3200.txt +largest pg31100.txt, pg3200.txt +largest. pg31100.txt +lariat pg3200.txt +lariat?" pg3200.txt +lark pg100.txt +lark, pg3200.txt, pg100.txt +lark. pg100.txt +larkins pg31100.txt +larkins's pg31100.txt +larkins. pg31100.txt +larks. pg100.txt +larks? pg3200.txt +larolles; pg31100.txt +larrikin pg3200.txt +larron! pg100.txt +lartius pg100.txt +lartius, pg100.txt +lartius? pg100.txt +las' pg3200.txt +las'?" pg3200.txt +lascelle's, pg31100.txt +lasciviously pg3200.txt +lash pg3200.txt +lash, pg3200.txt +lashed pg3200.txt +lashes." pg3200.txt +lashes; pg3200.txt +lashin's pg3200.txt +lashin's, pg3200.txt +lashings; pg3200.txt +lass pg3200.txt, pg100.txt +lass, pg100.txt +lass? pg100.txt +lassen. pg3200.txt +lasso, pg3200.txt +last pg31100.txt, pg3200.txt, pg100.txt +last! pg31100.txt, pg3200.txt, pg100.txt +last!" pg3200.txt +last!--but pg31100.txt +last" pg3200.txt +last, pg31100.txt, pg3200.txt, pg100.txt +last,) pg3200.txt +last- pg100.txt +last-- pg31100.txt +last--'go!' pg3200.txt +last--bread pg3200.txt +last--forward! pg3200.txt +last--i pg3200.txt +last--shortly pg3200.txt +last--the pg3200.txt +last--yes. pg3200.txt +last. pg31100.txt, pg3200.txt, pg100.txt +last." pg31100.txt, pg3200.txt +last.' pg3200.txt +last.) pg3200.txt +last: pg3200.txt +last; pg31100.txt, pg3200.txt, pg100.txt +last? pg3200.txt, pg100.txt +last?" pg31100.txt, pg3200.txt +last?' pg3200.txt +lasted pg31100.txt, pg3200.txt +lasted. 
pg3200.txt +lasting pg31100.txt, pg3200.txt +lasting, pg3200.txt +lasting; pg100.txt +lastly pg3200.txt, pg100.txt +lastly, pg100.txt +lasts pg100.txt +lasts. pg3200.txt, pg100.txt +latch pg3200.txt +latch, pg100.txt +latch; pg3200.txt +latchets, pg3200.txt +late pg31100.txt, pg3200.txt, pg100.txt +late! pg31100.txt, pg3200.txt, pg100.txt +late!" pg3200.txt +late, pg31100.txt, pg3200.txt, pg100.txt +late- pg100.txt +late--all pg3200.txt +late--he's pg3200.txt +late-night pg3200.txt +late. pg31100.txt, pg3200.txt, pg100.txt +late." pg31100.txt, pg3200.txt +late: pg3200.txt +late; pg31100.txt, pg3200.txt, pg100.txt +late? pg31100.txt, pg3200.txt, pg100.txt +late?" pg3200.txt +lately pg31100.txt, pg3200.txt, pg100.txt +lately). pg3200.txt +lately, pg31100.txt, pg3200.txt +lately. pg3200.txt +lately." pg31100.txt, pg3200.txt +lately.'" pg31100.txt +lately.... pg3200.txt +lately; pg3200.txt, pg100.txt +lately? pg3200.txt +lately?" pg31100.txt +lateness pg31100.txt +latent pg3200.txt +later pg31100.txt, pg3200.txt +later) pg3200.txt +later): pg3200.txt +later, pg3200.txt +later-- pg3200.txt +later--" pg3200.txt +later--away pg3200.txt +later--he pg3200.txt +later--when pg3200.txt +later-midnight: pg3200.txt +later. pg3200.txt +later." pg3200.txt +later.] pg3200.txt +later: pg3200.txt +later; pg3200.txt +later?' pg3200.txt +lateran, pg3200.txt +latescence. pg3200.txt +latest pg31100.txt, pg3200.txt +lath, pg100.txt +lath; pg100.txt +lather pg3200.txt +lathers! pg3200.txt +lathers, pg3200.txt +lathers--" pg3200.txt +lathrop pg3200.txt +latin pg3200.txt +latin! pg100.txt +latin, pg3200.txt +latin. pg3200.txt, pg100.txt +latin." pg3200.txt +latin; pg3200.txt +latin?" pg3200.txt +latinist pg3200.txt +latinist's pg3200.txt +latinist. pg3200.txt +latins, pg3200.txt +latitude pg3200.txt +latitude!" pg3200.txt +latitude, pg3200.txt +latitude. pg3200.txt +latitudes pg3200.txt +latitudes, pg3200.txt +latitudes. pg3200.txt +latter pg31100.txt, pg3200.txt +latter's pg3200.txt +latter, pg31100.txt, pg3200.txt +latter--if pg3200.txt +latter-born, pg100.txt +latter-day pg3200.txt +latter. pg31100.txt, pg3200.txt +latterly pg3200.txt +latterly. pg3200.txt +lattice, pg100.txt +lattice-work pg3200.txt +latticework pg3200.txt +laud pg3200.txt +laudations pg3200.txt +laudations. pg3200.txt +laugh pg31100.txt, pg3200.txt, pg100.txt +laugh! pg3200.txt, pg100.txt +laugh' pg3200.txt +laugh'd pg100.txt +laugh'd. pg100.txt +laugh, pg31100.txt, pg3200.txt, pg100.txt +laugh--and pg3200.txt +laugh--but pg3200.txt +laugh--no, pg3200.txt +laugh-maker pg3200.txt +laugh. pg31100.txt, pg3200.txt, pg100.txt +laugh." pg31100.txt, pg3200.txt +laugh: pg3200.txt +laugh; pg31100.txt, pg3200.txt, pg100.txt +laugh? pg100.txt +laughable pg3200.txt +laughable. pg100.txt +laughably pg3200.txt +laughed pg31100.txt, pg3200.txt +laughed, pg3200.txt +laughed. pg31100.txt, pg3200.txt +laughed; pg3200.txt +laughed?" pg3200.txt +laughers. pg3200.txt +laughin', pg3200.txt +laughing pg31100.txt, pg3200.txt +laughing! pg3200.txt, pg100.txt +laughing, pg31100.txt, pg3200.txt +laughing. pg31100.txt, pg3200.txt, pg100.txt +laughing? pg100.txt +laughs pg31100.txt +laughs, pg3200.txt +laughs. pg3200.txt +laughter pg31100.txt, pg3200.txt, pg100.txt +laughter, pg31100.txt, pg3200.txt, pg100.txt +laughter--and pg3200.txt +laughter--just pg3200.txt +laughter. pg31100.txt, pg3200.txt, pg100.txt +laughter.] pg3200.txt +laughter; pg3200.txt, pg100.txt +laughter]. pg3200.txt +launce pg100.txt +launce? pg100.txt +launcelot pg3200.txt, pg100.txt +launcelot's?" 
pg3200.txt +launcelot, pg3200.txt, pg100.txt +launcelot--" pg3200.txt +launcelot. pg3200.txt, pg100.txt +launcelot; pg3200.txt +launcelot? pg100.txt +launch pg3200.txt +launched pg31100.txt, pg3200.txt +launched. pg3200.txt +launching pg3200.txt +laundress pg100.txt +laundry-door, pg31100.txt +laura pg31100.txt, pg3200.txt +laura!" pg3200.txt +laura's pg3200.txt +laura, pg3200.txt +laura--lucrezia pg3200.txt +laura--tell pg3200.txt +laura. pg31100.txt, pg3200.txt +laura?" pg3200.txt +laurels pg3200.txt +laurels, pg100.txt +laurence. pg100.txt +laurence? pg100.txt +laurentina's pg31100.txt +laurie.] pg3200.txt +laurina pg31100.txt +lava pg3200.txt +lava-floods pg3200.txt +lava-gouts pg3200.txt +lavatory; pg3200.txt +lavinia pg100.txt +lavinia! pg100.txt +lavinia, pg100.txt +lavinia. pg100.txt +lavinia] pg100.txt +lavish pg3200.txt +lavished pg3200.txt +lavishest pg3200.txt +lavishing pg3200.txt +lavishly pg3200.txt, pg100.txt +lavishness pg3200.txt +law pg31100.txt, pg3200.txt, pg100.txt +law! pg100.txt +law!- pg100.txt +law, pg3200.txt, pg100.txt +law- pg100.txt +law-books, pg3200.txt +law-court. pg3200.txt +law-court." pg3200.txt +law-courts pg3200.txt +law-equipment. pg3200.txt +law-makers pg3200.txt +law-makers. pg3200.txt +law-respecting pg3200.txt +law. pg3200.txt, pg100.txt +law." pg31100.txt, pg3200.txt +law.'" pg31100.txt +law.] pg3200.txt +law; pg3200.txt, pg100.txt +law? pg3200.txt, pg100.txt +law?" pg3200.txt +law?' pg3200.txt +lawful pg3200.txt, pg100.txt +lawful, pg3200.txt, pg100.txt +lawful. pg100.txt +lawful? pg100.txt +lawless pg3200.txt +lawlessly. pg100.txt +lawlessness. pg31100.txt +lawn pg31100.txt +lawn. pg31100.txt, pg3200.txt +lawn?" pg31100.txt +lawns pg31100.txt, pg3200.txt +lawr. pg3200.txt +lawrence pg3200.txt +lawrence--" pg3200.txt +lawrence?" pg3200.txt +laws pg31100.txt, pg3200.txt, pg100.txt +laws! pg3200.txt +laws, pg3200.txt, pg100.txt +laws. pg31100.txt, pg3200.txt, pg100.txt +laws; pg3200.txt +lawson pg3200.txt +lawsuit pg3200.txt +lawyer pg3200.txt, pg100.txt +lawyer's pg3200.txt +lawyer, pg31100.txt, pg3200.txt +lawyer--a, pg3200.txt +lawyer--and pg3200.txt +lawyer-ways--and pg3200.txt +lawyer. pg3200.txt +lawyer." pg31100.txt +lawyer; pg100.txt +lawyer? pg3200.txt, pg100.txt +lawyers pg3200.txt +lawyers, pg3200.txt +lawyers. pg3200.txt, pg100.txt +lawyers?" pg3200.txt +lax. pg3200.txt +laxart's pg3200.txt +laxart, pg3200.txt +laxart," pg3200.txt +laxart. pg3200.txt +lay pg31100.txt, pg3200.txt, pg100.txt +lay'st, pg100.txt +lay, pg3200.txt, pg100.txt +lay-- pg3200.txt +lay--if pg3200.txt +lay-out pg3200.txt +lay-out, pg3200.txt +lay. pg3200.txt, pg100.txt +lay: pg100.txt +lay; pg3200.txt, pg100.txt +lay?" pg3200.txt +layer pg3200.txt +layer. pg3200.txt +layer; pg3200.txt +layers, pg3200.txt +layin' pg3200.txt +laying pg3200.txt, pg100.txt +layman, pg3200.txt +layout. pg3200.txt +layout; pg3200.txt +lays pg3200.txt +lays, pg100.txt +lazarus pg3200.txt, pg100.txt +lazarus. pg3200.txt +lazied. pg3200.txt +laziest pg3200.txt +lazily, pg3200.txt +laziness pg3200.txt +laziness, pg3200.txt +lazy pg3200.txt +lazy! pg31100.txt +lazy, pg3200.txt +lazying pg3200.txt +le's pg3200.txt +lead pg31100.txt, pg3200.txt, pg100.txt +lead! pg100.txt +lead!" pg3200.txt +lead!' pg3200.txt +lead'st, pg100.txt +lead, pg3200.txt, pg100.txt +lead," pg3200.txt +lead--but pg3200.txt +lead--worth pg3200.txt +lead. pg31100.txt, pg3200.txt, pg100.txt +lead." pg3200.txt +lead.' pg3200.txt +lead; pg3200.txt +lead?" 
pg3200.txt +leaded pg3200.txt +leaden pg3200.txt +leader pg3200.txt +leader, pg100.txt +leader. pg3200.txt, pg100.txt +leaders pg3200.txt +leaders, pg3200.txt +leaders. pg100.txt +leaders; pg3200.txt +leadership pg3200.txt +leadeth pg3200.txt +leading pg31100.txt, pg3200.txt +leading] pg100.txt +leads pg31100.txt, pg3200.txt, pg100.txt +leads!--no, pg3200.txt +leads") pg3200.txt +leads. pg3200.txt, pg100.txt +leadsmen. pg3200.txt +leaf pg3200.txt, pg100.txt +leaf, pg3200.txt, pg100.txt +leaf--she pg3200.txt +leaf. pg3200.txt, pg100.txt +leaf; pg100.txt +leafful pg3200.txt +leafy pg3200.txt +league pg3200.txt, pg100.txt +league! pg100.txt +league, pg3200.txt, pg100.txt +league. pg100.txt +league; pg100.txt +leagues pg3200.txt +leagues, pg100.txt +leagues- pg100.txt +leagues. pg3200.txt, pg100.txt +leagues." pg3200.txt +leak pg3200.txt, pg100.txt +leak!" pg3200.txt +leak, pg100.txt +leak. pg3200.txt +leak; pg3200.txt +leak?" pg3200.txt +leaked pg3200.txt +leaking pg3200.txt +leaking, pg3200.txt +leaky pg100.txt +lean pg3200.txt, pg100.txt +lean, pg100.txt +lean-to pg3200.txt +lean-to, pg3200.txt +lean. pg100.txt +lean; pg100.txt +leandro, pg3200.txt +leaned pg3200.txt +leaning pg31100.txt, pg3200.txt +leans pg3200.txt, pg100.txt +leans, pg100.txt +leant pg31100.txt +leap pg3200.txt, pg100.txt +leap' pg3200.txt +leap, pg100.txt +leap- pg100.txt +leap--an pg3200.txt +leap; pg100.txt +leaped pg3200.txt +leaped, pg3200.txt +leaping pg3200.txt, pg100.txt +leaping. pg3200.txt +leaps pg3200.txt +leapt pg100.txt +lear pg3200.txt, pg100.txt +lear! pg100.txt +lear's, pg3200.txt +lear, pg100.txt +lear. pg100.txt +learn pg31100.txt, pg3200.txt, pg100.txt +learn'd pg100.txt +learn'd, pg100.txt +learn'd. pg100.txt +learn, pg3200.txt, pg100.txt +learn. pg3200.txt, pg100.txt +learn." pg31100.txt, pg3200.txt +learn; pg31100.txt, pg100.txt +learned pg31100.txt, pg3200.txt, pg100.txt +learned, pg31100.txt, pg3200.txt +learned--no, pg3200.txt +learned--that pg3200.txt +learned. pg3200.txt, pg100.txt +learned? pg100.txt +learned?" pg3200.txt +learnedly pg100.txt +learners pg3200.txt +learning pg31100.txt, pg3200.txt +learning!" pg3200.txt +learning, pg31100.txt, pg3200.txt, pg100.txt +learning. pg3200.txt, pg100.txt +learning; pg3200.txt +learnings pg100.txt +learns pg3200.txt, pg100.txt +learnt pg31100.txt, pg3200.txt +learnt, pg31100.txt +learnt. pg31100.txt +learnt: pg100.txt +leary, pg3200.txt +leas pg100.txt +leas, pg100.txt +lease pg31100.txt, pg3200.txt, pg100.txt +lease, pg100.txt +lease. pg100.txt +lease." pg31100.txt +lease; pg100.txt +leash, pg100.txt +least pg31100.txt, pg3200.txt, pg100.txt +least'; pg3200.txt +least, pg31100.txt, pg3200.txt, pg100.txt +least--asia. pg3200.txt +least--you pg31100.txt +least. pg31100.txt, pg3200.txt, pg100.txt +least." pg31100.txt, pg3200.txt +least.' pg3200.txt +least: pg31100.txt +least; pg31100.txt, pg3200.txt +least? pg100.txt +leather, pg3200.txt +leather-headed pg3200.txt +leather-headed; pg3200.txt +leather. pg100.txt +leather; pg3200.txt +leathern pg3200.txt +leathers. pg3200.txt +leathers; pg3200.txt +leave pg31100.txt, pg3200.txt, pg100.txt +leave! pg100.txt +leave!" pg3200.txt +leave, pg31100.txt, pg3200.txt, pg100.txt +leave- pg100.txt +leave--but pg31100.txt +leave--discovered pg3200.txt +leave-taking, pg31100.txt, pg100.txt +leave-taking. pg100.txt +leave. pg31100.txt, pg3200.txt, pg100.txt +leave." pg31100.txt, pg3200.txt +leave: pg3200.txt, pg100.txt +leave; pg3200.txt, pg100.txt +leave? pg3200.txt, pg100.txt +leave?" pg3200.txt +leavening. 
pg100.txt +leaves pg31100.txt, pg3200.txt, pg100.txt +leaves! pg3200.txt +leaves, pg3200.txt, pg100.txt +leaves--passengers pg3200.txt +leaves. pg3200.txt, pg100.txt +leaves." pg31100.txt, pg3200.txt +leaves; pg3200.txt +leaving pg31100.txt, pg3200.txt, pg100.txt +leaving, pg3200.txt +leaving- pg100.txt +leaving. pg3200.txt +leavings pg3200.txt +leavitt?" pg3200.txt +leavy. pg100.txt +leb' pg3200.txt +lebanon pg3200.txt +lecco. pg3200.txt +lecher pg3200.txt +lecher's pg100.txt +lecher. pg3200.txt +lecher; pg100.txt +lechery, pg100.txt +lechery? pg100.txt +lecky's. pg3200.txt +lecky, pg3200.txt +lecture pg31100.txt, pg3200.txt +lecture, pg3200.txt +lecture-agent, pg3200.txt +lecture-courses pg3200.txt +lecture-lecture-lecture--but pg3200.txt +lecture-theater, pg3200.txt +lecture-tour pg3200.txt +lecture. pg3200.txt +lecture." pg3200.txt +lectured pg3200.txt +lecturer pg3200.txt +lecturer! pg3200.txt +lecturers; pg3200.txt +lectures pg3200.txt +lectures, pg3200.txt +lectures; pg3200.txt +lectureship pg3200.txt +lectureships; pg3200.txt +lecturing pg3200.txt +lecturing, pg3200.txt +lecturing. pg3200.txt +led pg31100.txt, pg3200.txt, pg100.txt +led, pg3200.txt, pg100.txt +led-- pg3200.txt +led. pg31100.txt +led.] pg100.txt +led? pg100.txt +ledge pg31100.txt, pg3200.txt +ledge. pg3200.txt +ledge?" pg3200.txt +ledger pg3200.txt +ledger, pg3200.txt +ledger. pg3200.txt +ledges pg3200.txt +leds pg3200.txt +lee pg3200.txt +lee--many's pg3200.txt +lee. pg3200.txt +lee." pg3200.txt +leech. pg100.txt +leek pg100.txt +leek, pg100.txt +leek. pg100.txt +leek; pg100.txt +leeks. pg100.txt +leer. pg100.txt +leering, pg3200.txt +leers pg100.txt +lees pg100.txt +leet, pg100.txt +leeward pg3200.txt +lef' pg3200.txt +left pg31100.txt, pg3200.txt, pg100.txt +left! pg3200.txt, pg100.txt +left!" pg3200.txt +left, pg31100.txt, pg3200.txt, pg100.txt +left--and pg3200.txt +left--don't pg3200.txt +left--ri-- pg3200.txt +left-hand pg3200.txt +left-handed pg3200.txt +left-handed." pg3200.txt +left-handers, pg3200.txt +left. pg3200.txt, pg100.txt +left." pg31100.txt, pg3200.txt +left.' pg3200.txt +left.] pg3200.txt +left; pg3200.txt, pg100.txt +left? pg3200.txt, pg100.txt +left?" pg31100.txt, pg3200.txt +left?' pg3200.txt +lefts. pg3200.txt +leg pg31100.txt, pg3200.txt, pg100.txt +leg!!!!!" pg3200.txt +leg!" pg3200.txt +leg" pg3200.txt +leg, pg3200.txt, pg100.txt +leg-chain pg3200.txt +leg. pg3200.txt, pg100.txt +leg." pg3200.txt +leg; pg31100.txt, pg3200.txt +leg? pg100.txt +legacies. pg100.txt +legacy pg31100.txt, pg3200.txt, pg100.txt +legacy? pg100.txt +legal pg31100.txt, pg3200.txt, pg100.txt +legality pg3200.txt +legally pg3200.txt +legally, pg3200.txt +legally?" pg3200.txt +legate pg100.txt +legation pg3200.txt +legen pg3200.txt +legend pg3200.txt +legend, pg3200.txt +legend. pg3200.txt +legend." pg3200.txt +legend: pg3200.txt +legend; pg3200.txt +legend?' pg3200.txt +legendary. pg3200.txt +legends pg3200.txt +legends, pg31100.txt +legends; pg3200.txt +legerdemain, pg3200.txt +legerity. pg100.txt +legg'd pg100.txt +legge. pg3200.txt +legged pg3200.txt +leggings, pg3200.txt +leghorn pg3200.txt +leghorn. pg3200.txt +legion pg3200.txt +legions pg3200.txt, pg100.txt +legions, pg100.txt +legions; pg100.txt +legislation pg3200.txt +legislation. pg3200.txt +legislative pg3200.txt +legislator. pg3200.txt +legislature pg3200.txt +legislature, pg3200.txt +legislature--then pg3200.txt +legislature. pg3200.txt +legislatures pg3200.txt +legitimate pg3200.txt +legitimate, pg100.txt +legitimate. 
pg3200.txt +legitimate: pg100.txt +legitimately pg3200.txt +leglets, pg3200.txt +legs pg3200.txt, pg100.txt +legs, pg31100.txt, pg3200.txt, pg100.txt +legs. pg3200.txt, pg100.txt +legs.- pg100.txt +lehi"; pg3200.txt +lei, pg3200.txt +leicester, pg100.txt +leiger. pg100.txt +leis pg3200.txt +leisure pg31100.txt, pg3200.txt, pg100.txt +leisure, pg3200.txt, pg100.txt +leisure. pg3200.txt, pg100.txt +leisure." pg31100.txt +leisure; pg3200.txt, pg100.txt +leisure? pg100.txt +leisurely pg31100.txt, pg3200.txt, pg100.txt +leisurely. pg3200.txt +lem pg3200.txt +lem's pg3200.txt +lem, pg3200.txt +leman. pg3200.txt +leman.' pg100.txt +lemme pg3200.txt +lemon. pg100.txt +lemonade, pg3200.txt +lemonade." pg3200.txt +lemons, pg3200.txt +lena pg3200.txt +lena? pg100.txt +lend pg31100.txt, pg3200.txt, pg100.txt +lend, pg100.txt +lend. pg3200.txt +lend: pg100.txt +lendest, pg3200.txt +lendings. pg3200.txt +lends pg100.txt +length pg31100.txt, pg3200.txt, pg100.txt +length"--a pg3200.txt +length, pg31100.txt, pg3200.txt, pg100.txt +length-- pg3200.txt +length. pg3200.txt, pg100.txt +length." pg31100.txt, pg3200.txt +length; pg3200.txt +length? pg100.txt +lengthen pg31100.txt +lengthening pg31100.txt +lengthens pg100.txt +lengths pg3200.txt +lengths. pg3200.txt +lengthways, pg3200.txt +lengthwise, pg3200.txt +lengthwise. pg3200.txt +lengthy pg3200.txt +lengthy. pg3200.txt +lenient." pg31100.txt +leniently. pg3200.txt +lenity pg100.txt +lenity, pg100.txt +lenity? pg100.txt +lennox. pg100.txt +lent pg31100.txt, pg3200.txt, pg100.txt +lent, pg100.txt +lent; pg100.txt +lent? pg100.txt +lenten pg100.txt +leonardo pg3200.txt +leonato's? pg100.txt +leonato, pg100.txt +leonato. pg100.txt +leonato.] pg100.txt +leonato? pg100.txt +leonato]. pg100.txt +leonatus pg100.txt +leonatus! pg100.txt +leonatus, pg100.txt +leonatus.' pg100.txt +leonatus; pg100.txt +leonatus? pg100.txt +leonidas pg3200.txt +leontes pg100.txt +leontes, pg100.txt +leontes. pg100.txt +leontes? pg100.txt +leopard pg3200.txt +leopard, pg100.txt +leopard. pg3200.txt +leopold; pg3200.txt +leos, pg3200.txt +leper pg3200.txt +leper-pus pg3200.txt +lepers, pg3200.txt +lepidus pg100.txt +lepidus! pg100.txt +lepidus!' pg100.txt +lepidus, pg100.txt +lepidus. pg100.txt +lepidus? pg100.txt +lepidus] pg100.txt +leprosy pg3200.txt +leprosy! pg3200.txt +lernen pg3200.txt +les pg3200.txt, pg100.txt +leschetitzky pg3200.txt +lesley pg31100.txt +lesley's pg31100.txt +lesley, pg31100.txt +lesley-castle pg31100.txt +lesley-castle, pg31100.txt +lesley. pg31100.txt +less pg31100.txt, pg3200.txt, pg100.txt +less, pg31100.txt, pg3200.txt, pg100.txt +less. pg31100.txt, pg3200.txt, pg100.txt +less." pg31100.txt, pg3200.txt +less: pg100.txt +less; pg31100.txt, pg3200.txt, pg100.txt +less? pg100.txt +lessen pg31100.txt +lessen. pg31100.txt +lessened pg31100.txt +lessened, pg31100.txt +lessening pg31100.txt +lesser pg31100.txt, pg3200.txt, pg100.txt +lesser; pg100.txt +lesson pg31100.txt, pg3200.txt +lesson, pg3200.txt +lesson-sermon pg3200.txt +lesson. pg31100.txt, pg3200.txt +lesson." pg31100.txt, pg3200.txt +lesson.' pg3200.txt +lesson; pg3200.txt +lessons pg31100.txt, pg3200.txt +lessons--morals. pg3200.txt +lessons. pg3200.txt +lessons." pg3200.txt +lessons?" pg3200.txt +lest pg31100.txt, pg3200.txt, pg100.txt +lestrake. pg100.txt +let's pg3200.txt, pg100.txt +let's--" pg3200.txt +let, pg3200.txt +let--" pg3200.txt +let. pg3200.txt +lethargy. pg3200.txt +lethargy: pg3200.txt +lethargy? pg100.txt +lethe. pg100.txt +lets pg3200.txt, pg100.txt +lett'red? 
pg100.txt +letter pg31100.txt, pg3200.txt, pg100.txt +letter! pg100.txt +letter!" pg31100.txt +letter' pg3200.txt +letter, pg31100.txt, pg3200.txt, pg100.txt +letter,) pg31100.txt +letter- pg3200.txt, pg100.txt +letter-- pg31100.txt +letter--" pg3200.txt +letter----" pg3200.txt +letter--and pg3200.txt +letter--but pg31100.txt +letter--how pg3200.txt +letter--tribune. pg3200.txt +letter-correspondence pg3200.txt +letter-scribbling. pg3200.txt +letter-writing! pg31100.txt +letter-writing, pg3200.txt +letter-writing--and pg3200.txt +letter. pg31100.txt, pg3200.txt, pg100.txt +letter." pg31100.txt, pg3200.txt +letter.' pg100.txt +letter.) pg3200.txt +letter.] pg3200.txt, pg100.txt +letter: pg3200.txt, pg100.txt +letter; pg31100.txt, pg3200.txt, pg100.txt +letter? pg31100.txt, pg3200.txt, pg100.txt +letter?" pg31100.txt, pg3200.txt +letter] pg100.txt +letter]. pg100.txt +lettered pg3200.txt +letterpress." pg3200.txt +letters pg31100.txt, pg3200.txt, pg100.txt +letters! pg100.txt +letters, pg31100.txt, pg3200.txt, pg100.txt +letters--one pg3200.txt +letters--they pg3200.txt +letters--well?" pg3200.txt +letters--which pg3200.txt +letters. pg31100.txt, pg3200.txt, pg100.txt +letters." pg31100.txt, pg3200.txt +letters: pg3200.txt +letters; pg3200.txt +letters? pg100.txt +letters?" pg3200.txt +letters] pg100.txt +letting pg31100.txt, pg3200.txt +letting-on pg3200.txt +lettres, pg3200.txt +leukerbad. pg3200.txt +leur pg100.txt +levant pg3200.txt +levantine; pg3200.txt +levee pg3200.txt +levee-rim, pg3200.txt +leveed, pg3200.txt +levees pg3200.txt +levees. pg3200.txt +level pg3200.txt, pg100.txt +level, pg3200.txt +level. pg3200.txt +level." pg3200.txt +leveled pg3200.txt +leveler pg3200.txt +levels pg3200.txt +levels. pg3200.txt +leven? pg3200.txt +lever. pg3200.txt +levi pg3200.txt +leviathan pg100.txt +leviathans pg100.txt +levied pg3200.txt, pg100.txt +levied, pg100.txt +levied; pg100.txt +levies, pg3200.txt, pg100.txt +levies. pg3200.txt +levity pg31100.txt, pg3200.txt, pg100.txt +levy pg3200.txt, pg100.txt +levy, pg100.txt +levying pg100.txt +lewd pg3200.txt +lewis pg3200.txt, pg100.txt +lewis, pg3200.txt +lewis." pg3200.txt +lewis: pg3200.txt +li. pg3200.txt +lia- pg100.txt +liabilities, pg3200.txt +liability pg3200.txt, pg100.txt +liability, pg100.txt +liable pg31100.txt, pg3200.txt +liable, pg100.txt +liable. pg100.txt +liar pg3200.txt, pg100.txt +liar!" pg3200.txt +liar, pg3200.txt, pg100.txt +liar--evidence pg3200.txt +liar. pg3200.txt, pg100.txt +liar." pg3200.txt +liar; pg100.txt +liars pg3200.txt, pg100.txt +liars! pg100.txt +liars. pg3200.txt, pg100.txt +liars; pg3200.txt +libel. pg3200.txt +libel." pg3200.txt +libeller), pg3200.txt +liberal pg31100.txt, pg3200.txt, pg100.txt +liberal! pg31100.txt +liberal, pg31100.txt, pg100.txt +liberal.' pg100.txt +liberality pg31100.txt, pg3200.txt +liberality, pg31100.txt, pg100.txt +liberality? pg100.txt +liberally pg3200.txt +liberally--with pg3200.txt +liberally. pg3200.txt +liberate pg3200.txt +liberation pg3200.txt +liberties pg3200.txt +liberties!" pg3200.txt +liberties, pg3200.txt +liberties. pg3200.txt, pg100.txt +liberties." pg3200.txt +libertine, pg100.txt +libertine. pg100.txt +libertines pg100.txt +liberty pg31100.txt, pg3200.txt, pg100.txt +liberty! pg100.txt +liberty!" pg100.txt +liberty!' pg100.txt +liberty, pg31100.txt, pg3200.txt, pg100.txt +liberty-pole, pg3200.txt +liberty-pole. pg3200.txt +liberty. pg31100.txt, pg3200.txt, pg100.txt +liberty." pg3200.txt +liberty; pg100.txt +liberty? 
pg100.txt +librarians, pg3200.txt +library pg31100.txt, pg3200.txt, pg100.txt +library* pg100.txt +library, pg31100.txt, pg3200.txt, pg100.txt +library. pg31100.txt, pg3200.txt +library." pg31100.txt, pg3200.txt +library; pg3200.txt +libya. pg100.txt +licence, pg100.txt +licence. pg31100.txt +license pg31100.txt, pg3200.txt, pg100.txt +license, pg100.txt +license. pg31100.txt, pg3200.txt, pg100.txt +license." pg3200.txt +licenses, pg100.txt +licentious, pg100.txt +licentiousness. pg3200.txt +licio pg100.txt +licio- pg100.txt +licio. pg100.txt +lick pg3200.txt, pg100.txt +lick!"] pg3200.txt +lick, pg100.txt +lick? pg100.txt +licked pg3200.txt +licks pg3200.txt +licorice-root, pg3200.txt +lictors pg100.txt +lictors, pg100.txt +lid pg3200.txt +lid, pg31100.txt, pg3200.txt +lid. pg3200.txt +lid: pg3200.txt +lid; pg100.txt +lids pg100.txt +lie pg31100.txt, pg3200.txt, pg100.txt +lie! pg3200.txt, pg100.txt +lie!" pg3200.txt +lie!' pg3200.txt +lie' pg3200.txt +lie, pg3200.txt, pg100.txt +lie- pg100.txt +lie--559. pg3200.txt +lie--could pg3200.txt +lie--that pg3200.txt +lie. pg3200.txt, pg100.txt +lie." pg3200.txt +lie.' pg3200.txt +lie: pg3200.txt, pg100.txt +lie; pg3200.txt, pg100.txt +lie? pg3200.txt, pg100.txt +lie?" pg3200.txt +lie?' pg3200.txt +lieb. pg3200.txt +liebe! pg3200.txt +liebste! pg3200.txt +lied pg3200.txt +lied! pg100.txt +lied!' pg3200.txt +lied, pg3200.txt +lied." pg3200.txt +lied? pg100.txt +lief pg100.txt +liege pg100.txt +liege! pg100.txt +liege!" pg3200.txt +liege!- pg100.txt +liege!--of pg3200.txt +liege, pg100.txt +liege- pg100.txt +liege. pg100.txt +liege." pg3200.txt +liege; pg100.txt +liege? pg100.txt +lien pg100.txt +lies pg31100.txt, pg3200.txt, pg100.txt +lies! pg3200.txt, pg100.txt +lies!" pg3200.txt +lies, pg3200.txt, pg100.txt +lies- pg100.txt +lies--" pg3200.txt +lies--there pg3200.txt +lies. pg31100.txt, pg3200.txt, pg100.txt +lies." pg3200.txt +lies; pg3200.txt, pg100.txt +lies? pg3200.txt, pg100.txt +lies?" pg3200.txt +liest! pg100.txt +liest, pg100.txt +liest. pg100.txt +liest; pg100.txt +lieu pg31100.txt, pg3200.txt, pg100.txt +lieutenancy pg3200.txt +lieutenant pg31100.txt, pg3200.txt, pg100.txt +lieutenant's pg31100.txt +lieutenant) pg31100.txt +lieutenant, pg31100.txt, pg100.txt +lieutenant-general pg3200.txt +lieutenant-general's pg3200.txt +lieutenant-general. pg3200.txt +lieutenant-governor pg3200.txt +lieutenant. pg100.txt +lieutenant." pg31100.txt +lieutenant; pg3200.txt +lieutenant? pg100.txt +lieutenants pg3200.txt +lieutenants, pg100.txt +lieves pg3200.txt +life pg31100.txt, pg3200.txt, pg100.txt +life! pg31100.txt, pg3200.txt, pg100.txt +life!" pg31100.txt, pg3200.txt +life!' pg100.txt +life', pg3200.txt +life, pg31100.txt, pg3200.txt, pg100.txt +life," pg31100.txt, pg3200.txt +life- pg100.txt +life--' pg3200.txt +life---. pg31100.txt +life--an pg3200.txt +life--come, pg3200.txt +life--ordination pg31100.txt +life--required pg31100.txt +life--that pg3200.txt +life--want pg31100.txt +life-charts pg3200.txt +life-conflict pg3200.txt +life-history pg3200.txt +life-interest pg31100.txt +life-long pg3200.txt +life-preserver pg3200.txt +life-preserver! pg3200.txt +life-preserver, pg3200.txt +life-saver. pg3200.txt +life-size pg3200.txt +life. pg31100.txt, pg3200.txt, pg100.txt +life." pg31100.txt, pg3200.txt +life.' pg3200.txt, pg100.txt +life..... pg3200.txt +life.] pg3200.txt +life: pg31100.txt, pg100.txt +life; pg31100.txt, pg3200.txt, pg100.txt +life? pg3200.txt, pg100.txt +life?" 
pg31100.txt, pg3200.txt +lifeboats pg3200.txt +lifeless pg3200.txt +lifeless, pg3200.txt +lifeless. pg3200.txt +lifelike pg3200.txt +lifelong pg3200.txt +lifelong. pg3200.txt +lifetime pg3200.txt +lifetime! pg3200.txt +lifetime, pg3200.txt +lifetime--" pg3200.txt +lifetime. pg31100.txt, pg3200.txt +lifetime? pg3200.txt +lifetimes, pg3200.txt +lift pg3200.txt, pg100.txt +lift, pg3200.txt +lift. pg3200.txt +lift." pg3200.txt +lift; pg3200.txt +lifted pg3200.txt +lifted, pg3200.txt +lifter? pg100.txt +lifting pg31100.txt, pg3200.txt +lifts pg3200.txt +ligarius, pg100.txt +ligarius. pg100.txt +light pg31100.txt, pg3200.txt, pg100.txt +light! pg31100.txt, pg3200.txt, pg100.txt +light, pg31100.txt, pg3200.txt, pg100.txt +light- pg100.txt +light--always pg3200.txt +light--in pg3200.txt +light--the pg3200.txt +light--they pg3200.txt +light--we pg3200.txt +light-blue pg3200.txt +light-hearted pg3200.txt +light. pg31100.txt, pg3200.txt, pg100.txt +light." pg3200.txt +light: pg3200.txt +light; pg31100.txt, pg3200.txt, pg100.txt +light? pg100.txt +light?" pg31100.txt, pg3200.txt +lighted pg31100.txt, pg3200.txt, pg100.txt +lighted, pg3200.txt +lighted. pg3200.txt +lighted: pg3200.txt +lighten pg3200.txt +lightened pg3200.txt +lightened, pg3200.txt +lightens pg3200.txt +lightens] pg100.txt +lighter. pg3200.txt +lighthearted pg3200.txt +lighthouse pg3200.txt +lighting pg3200.txt +lighting. pg3200.txt +lightly pg3200.txt, pg100.txt +lightly, pg31100.txt, pg3200.txt, pg100.txt +lightly. pg31100.txt +lightness pg31100.txt +lightness, pg100.txt +lightness-- pg3200.txt +lightning pg3200.txt, pg100.txt +lightning!' pg3200.txt +lightning's pg3200.txt +lightning, pg3200.txt +lightning-flash, pg3200.txt +lightning-rod pg3200.txt +lightning-rod, pg3200.txt +lightning-rod." pg3200.txt +lightning-rods pg3200.txt +lightning-rods. pg3200.txt +lightning-winged pg3200.txt +lightning. pg3200.txt, pg100.txt +lightning." pg3200.txt +lightning.' pg3200.txt +lightnings pg3200.txt +lightnings! pg3200.txt +lightnings!" pg3200.txt +lightnings, pg3200.txt +lightnings; pg3200.txt +lights pg3200.txt, pg100.txt +lights! pg100.txt +lights, pg3200.txt, pg100.txt +lights. pg3200.txt, pg100.txt +lights; pg3200.txt +lights?" pg3200.txt +lightsome pg3200.txt +ligneous pg3200.txt +liholiho pg3200.txt +liholiho.' pg3200.txt +lii pg3200.txt +lii. pg3200.txt +liii. pg3200.txt +lik'd pg100.txt +like! pg100.txt +like!" pg31100.txt, pg3200.txt +like) pg3200.txt +like, pg31100.txt, pg3200.txt, pg100.txt +like- pg100.txt +like--" pg3200.txt +like--' pg3200.txt +like--awful, pg3200.txt +like--but pg31100.txt +like--like pg3200.txt +like--may pg3200.txt +like--this pg3200.txt +like. pg31100.txt, pg3200.txt, pg100.txt +like." pg31100.txt, pg3200.txt +like: pg3200.txt +like; pg3200.txt, pg100.txt +like? pg31100.txt, pg100.txt +like?" pg3200.txt +like?' pg3200.txt +liked pg31100.txt, pg3200.txt, pg100.txt +liked, pg3200.txt +liked--especially pg3200.txt +liked. pg31100.txt, pg100.txt +liked; pg3200.txt +liked;--that pg3200.txt +likeliest pg31100.txt +likelihood pg31100.txt +likelihood, pg100.txt +likelihood. pg100.txt +likelihood? pg100.txt +likelihoods pg100.txt +likelihoods, pg3200.txt +likely pg31100.txt, pg3200.txt, pg100.txt +likely! pg100.txt +likely, pg31100.txt, pg3200.txt, pg100.txt +likely. pg31100.txt, pg3200.txt, pg100.txt +likely." pg31100.txt, pg3200.txt +likely; pg3200.txt +likely?" pg3200.txt +likened pg3200.txt +likeness pg31100.txt +likeness, pg31100.txt +likeness. pg31100.txt, pg3200.txt, pg100.txt +likeness." 
pg31100.txt +likeness?" pg3200.txt +likenesses pg3200.txt +likes pg31100.txt, pg3200.txt, pg100.txt +likes. pg3200.txt, pg100.txt +likes." pg31100.txt, pg3200.txt +likes; pg100.txt +likest pg100.txt +likewise pg31100.txt, pg3200.txt +likewise) pg3200.txt +likewise, pg31100.txt, pg3200.txt +likewise--and pg3200.txt +likewise. pg31100.txt, pg3200.txt, pg100.txt +likewise." pg31100.txt +likewise; pg31100.txt +liking pg31100.txt, pg3200.txt, pg100.txt +liking! pg100.txt +liking, pg3200.txt, pg100.txt +liking. pg3200.txt, pg100.txt +liking? pg100.txt +likings pg100.txt +likings. pg100.txt +lilacs pg3200.txt +lilies pg3200.txt +liliputian pg3200.txt +lily! pg100.txt +lily, pg100.txt +lily." pg3200.txt +limb pg3200.txt +limb, pg3200.txt +limb- pg100.txt +limb-meal! pg100.txt +limb. pg100.txt +limber pg3200.txt +limbo pg100.txt +limbs pg3200.txt, pg100.txt +limbs!" pg31100.txt +limbs, pg31100.txt, pg3200.txt, pg100.txt +limbs. pg3200.txt, pg100.txt +limbs; pg3200.txt, pg100.txt +limbs? pg100.txt +lime pg100.txt +lime!" pg3200.txt +lime, pg3200.txt, pg100.txt +lime-kiln. pg100.txt +limerick pg3200.txt +limerick," pg3200.txt +limes, pg31100.txt +limestone pg3200.txt +limit pg3200.txt +limit! pg3200.txt +limit, pg100.txt +limit. pg3200.txt, pg100.txt +limit; pg3200.txt +limitation, pg100.txt +limitation. pg3200.txt +limitations pg3200.txt +limitations. pg3200.txt +limited pg31100.txt, pg3200.txt, pg100.txt +limited, pg100.txt +limited--" pg31100.txt +limitless pg3200.txt +limitless, pg3200.txt +limits pg3200.txt, pg100.txt +limits, pg31100.txt +limits. pg3200.txt +limousin pg3200.txt +limp pg3200.txt, pg100.txt +limp? pg100.txt +limped pg3200.txt +limpid pg3200.txt +limpid, pg3200.txt +limping pg3200.txt +lin'd pg100.txt +lin'd, pg100.txt +lincoln pg3200.txt +lincoln, pg3200.txt +linden, pg3200.txt +line pg31100.txt, pg3200.txt, pg100.txt +line! pg3200.txt +line" pg3200.txt +line, pg31100.txt, pg3200.txt, pg100.txt +line-- pg3200.txt +line--mainly?" pg3200.txt +line. pg31100.txt, pg3200.txt, pg100.txt +line." pg31100.txt, pg3200.txt +line.' pg3200.txt +line: pg3200.txt +line; pg3200.txt +line?" pg3200.txt +lineage, pg3200.txt +lineal pg3200.txt +lineament, pg100.txt +lineaments pg100.txt +lineaments, pg100.txt +lined pg3200.txt +linen pg3200.txt +linen, pg3200.txt +linen-draper." pg31100.txt +linen. pg100.txt +linen; pg100.txt +linen] pg100.txt +liner. pg3200.txt +lines pg31100.txt, pg3200.txt, pg100.txt +lines! pg3200.txt +lines, pg31100.txt, pg3200.txt, pg100.txt +lines--and pg3200.txt +lines. pg31100.txt, pg3200.txt, pg100.txt +lines." pg31100.txt +lines: pg3200.txt +lines; pg3200.txt, pg100.txt +lines? pg100.txt +lines?" pg3200.txt +ling pg100.txt +ling'ring, pg100.txt +lingam pg3200.txt +lingamburg. pg3200.txt +lingare, pg100.txt +linger pg3200.txt, pg100.txt +lingered pg31100.txt, pg3200.txt, pg100.txt +lingering pg3200.txt +lingers pg3200.txt +lingers, pg3200.txt +liniment pg3200.txt +lining pg3200.txt +link pg3200.txt +link'd, pg100.txt +link. pg3200.txt +link: pg3200.txt +linked pg31100.txt, pg3200.txt, pg100.txt +linking pg3200.txt +links pg3200.txt +links," pg3200.txt +links; pg100.txt +linnet." pg3200.txt +linsey-woolsey." pg3200.txt +linstock pg100.txt +lint pg3200.txt +lint, pg3200.txt +linta. pg100.txt +linz, pg3200.txt +lion pg3200.txt, pg100.txt +lion's pg100.txt +lion, pg100.txt +lion,--and pg3200.txt +lion-heart!--that pg3200.txt +lion. pg31100.txt, pg3200.txt, pg100.txt +lion; pg3200.txt +lion? pg3200.txt, pg100.txt +lionel pg3200.txt +lionel, pg3200.txt +liones. 
pg3200.txt +lioness, pg100.txt +lioness? pg100.txt +lions pg3200.txt, pg100.txt +lions, pg3200.txt +lions. pg100.txt +lip pg3200.txt, pg100.txt +lip! pg100.txt +lip, pg31100.txt, pg100.txt +lip-- pg3200.txt +lip--masked, pg3200.txt +lip. pg100.txt +lip; pg100.txt +lip? pg100.txt +lips pg31100.txt, pg3200.txt, pg100.txt +lips! pg100.txt +lips, pg31100.txt, pg3200.txt, pg100.txt +lips- pg100.txt +lips--for pg3200.txt +lips. pg31100.txt, pg3200.txt, pg100.txt +lips." pg31100.txt, pg3200.txt +lips.) pg3200.txt +lips.] pg3200.txt +lips; pg3200.txt, pg100.txt +lips? pg100.txt +liquid pg3200.txt +liquid, pg3200.txt +liquidation pg3200.txt +liquidation. pg3200.txt +liquids. pg3200.txt +liquify: pg3200.txt +liquor pg3200.txt, pg100.txt +liquor, pg31100.txt, pg3200.txt +liquor-drinking, pg3200.txt +liquor. pg31100.txt, pg3200.txt, pg100.txt +liquor." pg3200.txt +liquor.' pg100.txt +liquors pg3200.txt +liquors: pg3200.txt +lire. pg3200.txt +lis, pg3200.txt +lisa's. pg3200.txt +lisbon, pg3200.txt +lisping pg100.txt +list pg31100.txt, pg3200.txt, pg100.txt +list! pg100.txt +list" pg3200.txt +list, pg3200.txt, pg100.txt +list--accompanied pg3200.txt +list. pg3200.txt, pg100.txt +list." pg3200.txt +list.' pg100.txt +list: pg3200.txt +list; pg3200.txt +list? pg3200.txt +list?" pg3200.txt +listed pg3200.txt +listen pg31100.txt, pg3200.txt +listen'd pg100.txt +listen, pg31100.txt, pg3200.txt +listen. pg31100.txt, pg3200.txt +listen." pg3200.txt +listen.] pg3200.txt +listen: pg3200.txt +listen; pg31100.txt, pg100.txt +listened pg31100.txt, pg3200.txt +listened, pg31100.txt, pg3200.txt +listened-- pg3200.txt +listened. pg3200.txt +listened; pg31100.txt, pg3200.txt +listener pg31100.txt, pg3200.txt +listener's pg3200.txt +listener. pg3200.txt +listener: pg3200.txt +listeners pg3200.txt +listeners--his pg3200.txt +listeners. pg3200.txt +listening pg31100.txt, pg3200.txt +listening, pg3200.txt +listening. pg31100.txt, pg3200.txt +listening?" pg3200.txt +listens pg3200.txt +listens.] pg3200.txt +listens; pg3200.txt +listless pg3200.txt +listlessly pg3200.txt +listlessness, pg31100.txt +lists pg31100.txt, pg3200.txt, pg100.txt +lists, pg3200.txt, pg100.txt +lists. pg100.txt +lists? pg100.txt +lists] pg100.txt +lit--" pg3200.txt +lit. pg3200.txt +literally pg3200.txt +literary pg31100.txt, pg3200.txt, pg100.txt +literary, pg3200.txt +literature pg3200.txt +literature, pg3200.txt +literature," pg3200.txt +literature. pg3200.txt +literature." pg3200.txt +literature: pg3200.txt +literature; pg3200.txt +lithe pg3200.txt +lithograph pg3200.txt +lithographed pg3200.txt +lithographs pg3200.txt +litigation, pg3200.txt +litigation. pg3200.txt +littered pg3200.txt +litters; pg3200.txt +littery pg3200.txt +little, pg31100.txt, pg3200.txt, pg100.txt +little- pg100.txt +little--got pg3200.txt +little--i'd pg3200.txt +little--weaken pg3200.txt +little-while-longer pg3200.txt +little. pg31100.txt, pg3200.txt, pg100.txt +little." pg31100.txt, pg3200.txt +little: pg31100.txt +little; pg31100.txt, pg3200.txt, pg100.txt +little? pg3200.txt +little?" pg3200.txt +littleness pg31100.txt +littlest pg3200.txt +liv'd pg100.txt +liv'd! pg100.txt +liv'd, pg100.txt +liv'd. pg100.txt +liv'ry pg100.txt +liv'st pg100.txt +liv'st! pg100.txt +liv'st, pg100.txt +liv'st. pg100.txt +liv. pg3200.txt +live pg31100.txt, pg3200.txt, pg100.txt +live! pg100.txt +live!" pg3200.txt +live'- pg100.txt +live, pg3200.txt, pg100.txt +live- pg100.txt +live--and pg3200.txt +live. pg3200.txt, pg100.txt +live." pg3200.txt +live.' 
pg100.txt +live: pg100.txt +live; pg3200.txt, pg100.txt +live? pg3200.txt, pg100.txt +live?" pg3200.txt +liveable." pg31100.txt +lived pg31100.txt, pg3200.txt, pg100.txt +lived, pg31100.txt, pg3200.txt +lived--for pg3200.txt +lived--wholly pg3200.txt +lived. pg3200.txt +lived." pg31100.txt, pg3200.txt +lived; pg31100.txt +liveliest pg31100.txt, pg3200.txt +livelihood pg3200.txt +livelihood, pg3200.txt +livelihood. pg3200.txt +liveliness pg3200.txt +lively pg31100.txt, pg3200.txt +lively!" pg3200.txt +lively), pg31100.txt +lively, pg31100.txt, pg3200.txt +lively. pg3200.txt, pg100.txt +lively; pg31100.txt, pg3200.txt +liver pg100.txt +liver) pg100.txt +liver. pg100.txt +liveried pg3200.txt +liveries pg3200.txt +liveries--two pg3200.txt +liveries; pg100.txt +liverpool pg3200.txt +liverpool, pg31100.txt +liverpool. pg3200.txt +livers pg3200.txt, pg100.txt +livery pg100.txt +livery, pg100.txt +livery-stable pg3200.txt +livery. pg100.txt +livery." pg31100.txt +livery: pg100.txt +livery; pg3200.txt, pg100.txt +lives pg3200.txt, pg100.txt +lives! pg100.txt +lives!" pg3200.txt +lives!' pg3200.txt +lives, pg31100.txt, pg3200.txt, pg100.txt +lives- pg100.txt +lives--" pg3200.txt +lives---- pg3200.txt +lives. pg31100.txt, pg3200.txt, pg100.txt +lives." pg3200.txt +lives; pg3200.txt, pg100.txt +lives?" pg31100.txt +livest-looking pg3200.txt +livest; pg100.txt +liveth pg3200.txt +livia; pg100.txt +livid pg3200.txt +livid. pg3200.txt +living pg31100.txt, pg3200.txt, pg100.txt +living! pg100.txt +living's pg31100.txt +living, pg31100.txt, pg3200.txt, pg100.txt +living- pg100.txt +living--" pg3200.txt +living--has pg3200.txt +living-room, pg3200.txt +living. pg31100.txt, pg3200.txt, pg100.txt +living." pg31100.txt +living: pg3200.txt +living; pg3200.txt, pg100.txt +living? pg100.txt +living?" pg31100.txt, pg3200.txt +livings pg31100.txt +livingstone pg3200.txt +livingstone, pg3200.txt +livres. pg3200.txt +livy pg3200.txt +livy's pg3200.txt +livy's) pg3200.txt +livy. pg3200.txt +lix. pg3200.txt +lizard pg3200.txt +lize pg3200.txt +lize?" pg3200.txt +lizzy pg31100.txt +lizzy!" pg31100.txt +lizzy, pg31100.txt +lizzy,-- pg31100.txt +lizzy--if pg31100.txt +lizzy." pg31100.txt +lizzy; pg31100.txt +lizzy?" pg31100.txt +llama pg3200.txt +lo! pg3200.txt, pg100.txt +lo, pg3200.txt, pg100.txt +loa! pg100.txt +loach. pg100.txt +load pg3200.txt, pg100.txt +load, pg3200.txt, pg100.txt +load- pg100.txt +load. pg3200.txt +load; pg100.txt +load? pg100.txt +loaded pg3200.txt +loaded, pg3200.txt +loaded--for pg3200.txt +loaded. pg3200.txt +loaded?" pg3200.txt +loading pg31100.txt +loads pg3200.txt +loads, pg100.txt +loads. pg3200.txt +loads." pg3200.txt +loadstone pg3200.txt +loaf pg3200.txt +loafed pg3200.txt +loafer pg3200.txt +loafer's pg3200.txt +loafer, pg3200.txt +loafer. pg3200.txt +loafers pg3200.txt +loafing pg3200.txt +loafing, pg3200.txt +loafing-place pg3200.txt +loafs pg3200.txt +loan pg31100.txt, pg3200.txt +loan--acting pg3200.txt +loan; pg100.txt +loaned pg3200.txt +loans pg3200.txt +loath pg100.txt +loath, pg100.txt +loath. pg3200.txt +loath; pg100.txt +loathe pg3200.txt +loathed pg3200.txt +loathes pg100.txt +loathes. pg100.txt +loathing pg100.txt +loathings? pg31100.txt +loathly pg100.txt +loaves pg3200.txt, pg100.txt +lobby pg3200.txt +lobby, pg31100.txt +lobby. pg3200.txt, pg100.txt +lobbyist pg3200.txt +lobbyist, pg3200.txt +lobengula pg3200.txt +local pg3200.txt +local; pg3200.txt +localities pg3200.txt +localities. 
pg3200.txt +localities; pg3200.txt +locality pg3200.txt +locality, pg3200.txt +locality. pg3200.txt +locate pg3200.txt +located pg31100.txt, pg3200.txt, pg100.txt +located, pg3200.txt +located. pg3200.txt +located; pg3200.txt +location pg3200.txt +locations pg31100.txt, pg3200.txt, pg100.txt +lock pg31100.txt, pg3200.txt, pg100.txt +lock! pg3200.txt +lock'd pg100.txt +lock'd, pg100.txt +lock'd. pg100.txt +lock'd? pg100.txt +lock, pg31100.txt, pg3200.txt +lock-jaw. pg3200.txt +lock-up pg3200.txt +lock. pg3200.txt, pg100.txt +lock." pg31100.txt +locked pg31100.txt, pg3200.txt +locked!" pg3200.txt +locked, pg3200.txt +locked. pg3200.txt +locker, pg3200.txt +locker--sofa pg3200.txt +locking pg3200.txt +lockjaw pg3200.txt +lockjaw. pg3200.txt +locks pg3200.txt, pg100.txt +locks, pg3200.txt, pg100.txt +locks; pg3200.txt +lockup, pg3200.txt +locomotion. pg3200.txt +locust-plague pg3200.txt +locusts, pg100.txt +lode, pg3200.txt +lodestone pg3200.txt +lodg'd pg100.txt +lodg'd; pg100.txt +lodge pg3200.txt, pg100.txt +lodge, pg31100.txt, pg3200.txt +lodge-fire pg3200.txt +lodge-gates; pg31100.txt +lodge. pg31100.txt, pg3200.txt, pg100.txt +lodged pg3200.txt +lodged. pg3200.txt, pg100.txt +lodgers. pg100.txt +lodges pg3200.txt +lodging pg31100.txt, pg3200.txt +lodging, pg3200.txt, pg100.txt +lodging. pg3200.txt, pg100.txt +lodgings pg31100.txt, pg3200.txt +lodgings, pg31100.txt, pg3200.txt +lodgings--an pg3200.txt +lodgings. pg31100.txt, pg3200.txt +lodgings." pg3200.txt +lodgment pg3200.txt +lodgment. pg3200.txt +lodovico pg100.txt +lodovico? pg100.txt +lodowick pg100.txt +lodowick. pg100.txt +lodowick? pg100.txt +loft pg3200.txt +loft, pg3200.txt +loft." pg3200.txt +loftiest pg3200.txt +loftily pg3200.txt +loftily: pg3200.txt +loftiness pg3200.txt +lofty pg31100.txt, pg3200.txt, pg100.txt +lofty, pg100.txt +log pg3200.txt, pg100.txt +log, pg3200.txt +log-- pg3200.txt +log-book pg3200.txt +log-man. pg100.txt +log. pg3200.txt +log.[3] pg3200.txt +log: pg3200.txt +logarythm!" pg3200.txt +loggia, pg3200.txt +logic pg3200.txt +logic! pg3200.txt +logic, pg3200.txt +logic; pg3200.txt +logris. pg3200.txt +logs pg3200.txt, pg100.txt +logs, pg3200.txt +logs--and pg3200.txt +logs. pg3200.txt, pg100.txt +logs." pg3200.txt +logs; pg3200.txt +lohier pg3200.txt +lohier, pg3200.txt +loins pg100.txt +loins! pg100.txt +loins'? pg100.txt +loins, pg3200.txt, pg100.txt +loins. pg3200.txt, pg100.txt +loire. pg3200.txt +loire." pg3200.txt +loiter pg31100.txt +loitered pg31100.txt, pg3200.txt +loitering pg31100.txt, pg3200.txt +loitering? pg100.txt +loiterings pg3200.txt +loll pg3200.txt +lolled pg3200.txt +lolling pg3200.txt +lolling, pg100.txt +lombardy, pg100.txt +lon." pg3200.txt +london pg31100.txt, pg3200.txt, pg100.txt +london! pg31100.txt +london!" pg3200.txt +london's pg31100.txt +london) pg31100.txt +london). pg3200.txt +london, pg31100.txt, pg3200.txt, pg100.txt +london," pg31100.txt +london--1874 pg3200.txt +london--and pg31100.txt +london--the pg3200.txt +london. pg31100.txt, pg3200.txt, pg100.txt +london." pg31100.txt, pg3200.txt +london.--ibid. pg3200.txt +london: pg3200.txt +london; pg31100.txt, pg100.txt +london? pg31100.txt, pg100.txt +london?" pg31100.txt, pg3200.txt +londoners pg100.txt +lone pg3200.txt +loneliness pg3200.txt +loneliness, pg3200.txt +loneliness. pg3200.txt +lonely pg31100.txt, pg3200.txt +lonely, pg3200.txt +lonely; pg3200.txt +lonesome pg3200.txt +lonesome! pg3200.txt +lonesome, pg3200.txt +lonesome. pg3200.txt +lonesome." pg3200.txt +lonesomeness. 
pg3200.txt +lonesomer pg3200.txt +lonesomest pg3200.txt +lonesomest. pg3200.txt +long pg31100.txt, pg3200.txt, pg100.txt +long! pg100.txt +long!" pg3200.txt +long'd pg100.txt +long'st, pg100.txt +long'st- pg100.txt +long, pg31100.txt, pg3200.txt, pg100.txt +long," pg31100.txt +long- pg3200.txt +long--" pg31100.txt +long--and pg3200.txt +long--en pg3200.txt +long--ever pg3200.txt +long--so pg3200.txt +long-a. pg100.txt +long-armed, pg3200.txt +long-departed pg3200.txt +long-drawn pg3200.txt +long-forgotten pg3200.txt +long-haired pg3200.txt +long-handled pg3200.txt +long-legged pg3200.txt +long-lived, pg3200.txt +long-liver. pg3200.txt +long-missing pg3200.txt +long-oval pg3200.txt +long-submerged pg3200.txt +long-suffering pg3200.txt +long-suffering, pg3200.txt +long-sustained pg3200.txt +long-tailed pg3200.txt +long-time pg3200.txt +long-vanished pg3200.txt +long-winded- pg100.txt +long. pg31100.txt, pg3200.txt, pg100.txt +long." pg31100.txt, pg3200.txt +long.' pg100.txt +long: pg3200.txt +long; pg31100.txt, pg3200.txt, pg100.txt +long;. pg100.txt +long? pg3200.txt, pg100.txt +long?" pg31100.txt, pg3200.txt +longaville pg100.txt +longaville, pg100.txt +longaville. pg100.txt +longaville; pg100.txt +longbourn pg31100.txt +longbourn, pg31100.txt +longbourn. pg31100.txt +longbourn." pg31100.txt +longbourn; pg31100.txt +longed pg31100.txt, pg3200.txt +longer pg31100.txt, pg3200.txt, pg100.txt +longer! pg3200.txt +longer!" pg3200.txt +longer, pg31100.txt, pg3200.txt, pg100.txt +longer--but pg3200.txt +longer--nay, pg31100.txt +longer. pg31100.txt, pg3200.txt, pg100.txt +longer." pg31100.txt, pg3200.txt +longer.--he pg31100.txt +longer: pg100.txt +longer; pg31100.txt, pg3200.txt +longer?" pg3200.txt +longest pg31100.txt, pg3200.txt +longevity pg3200.txt +longfellow pg3200.txt +longfellow's pg3200.txt +longfellow, pg3200.txt +longfellow." pg3200.txt +longing pg31100.txt, pg3200.txt +longing, pg3200.txt, pg100.txt +longing. pg3200.txt, pg100.txt +longingly. pg3200.txt +longings--regrets pg3200.txt +longings. pg3200.txt +longitude pg3200.txt +longitude!" pg3200.txt +longitude, pg3200.txt +longitude. pg3200.txt +longs, pg100.txt +longside pg3200.txt +longstaple?" pg31100.txt +longstreet." pg3200.txt +lonny, pg3200.txt +lono. pg3200.txt +loo! pg100.txt +loof'd, pg100.txt +look pg31100.txt, pg3200.txt, pg100.txt +look! pg31100.txt, pg100.txt +look!" pg3200.txt +look!--did pg31100.txt +look'd pg100.txt +look'st pg100.txt +look, pg31100.txt, pg3200.txt, pg100.txt +look-out pg31100.txt, pg3200.txt +look-out." pg3200.txt +look. pg31100.txt, pg3200.txt, pg100.txt +look." pg3200.txt +look.' pg3200.txt +look: pg3200.txt +look; pg3200.txt, pg100.txt +look? pg100.txt +looked pg31100.txt, pg3200.txt +looked, pg31100.txt, pg3200.txt +looked--the pg3200.txt +looked. pg3200.txt +looked: pg3200.txt +looked; pg3200.txt +looked?" pg31100.txt +looker-on pg100.txt +looker-on, pg3200.txt +looker-on. pg100.txt +lookers-on pg31100.txt +looketh pg3200.txt +looketh." pg3200.txt +looking pg31100.txt, pg3200.txt, pg100.txt +looking, pg31100.txt, pg3200.txt +looking--but pg3200.txt +looking-glass pg100.txt +looking-glass, pg3200.txt, pg100.txt +looking-glass- pg100.txt +looking-glass. pg3200.txt, pg100.txt +looking-glasses pg31100.txt +looking. pg3200.txt +looking." pg3200.txt +looking; pg3200.txt +looking? pg100.txt +looking?" pg31100.txt, pg3200.txt +lookings-on pg31100.txt +lookout pg3200.txt +lookout, pg3200.txt +lookout." pg3200.txt +looks pg31100.txt, pg3200.txt, pg100.txt +looks! 
pg100.txt +looks, pg31100.txt, pg3200.txt, pg100.txt +looks," pg3200.txt +looks-- pg31100.txt +looks. pg31100.txt, pg3200.txt, pg100.txt +looks." pg31100.txt +looks: pg31100.txt +looks; pg31100.txt, pg3200.txt, pg100.txt +looks? pg100.txt +looky pg3200.txt +lookyhere!" pg3200.txt +loom pg3200.txt +loom." pg3200.txt +loomed pg3200.txt +looming pg3200.txt +loomis--his pg3200.txt +looms pg3200.txt +looms!" pg3200.txt +loon! pg100.txt +loop pg100.txt +loop, pg3200.txt +loophole, pg3200.txt +loos'd; pg100.txt +loose pg3200.txt, pg100.txt +loose! pg100.txt +loose!" pg3200.txt +loose!' pg3200.txt +loose, pg3200.txt, pg100.txt +loose--and pg3200.txt +loose-fit pg3200.txt +loose-wiv'd, pg100.txt +loose. pg3200.txt, pg100.txt +loose." pg3200.txt +loose; pg100.txt +loose?" pg3200.txt +loosely, pg3200.txt +loosened pg3200.txt +loot pg3200.txt +looy pg3200.txt +lope pg3200.txt +lopp'd, pg100.txt +lor; pg3200.txt +lord pg31100.txt, pg3200.txt, pg100.txt +lord! pg31100.txt, pg3200.txt, pg100.txt +lord!" pg3200.txt +lord" pg3200.txt +lord'! pg100.txt +lord'; pg3200.txt +lord's pg3200.txt, pg100.txt +lord's, pg3200.txt +lord, pg3200.txt, pg100.txt +lord," pg3200.txt +lord- pg100.txt +lord-- pg100.txt +lord. pg100.txt +lord." pg3200.txt +lord.' pg100.txt +lord.] pg100.txt +lord: pg3200.txt, pg100.txt +lord; pg3200.txt, pg100.txt +lord? pg3200.txt, pg100.txt +lord?" pg3200.txt +lorde. pg3200.txt +lorded, pg100.txt +lording pg3200.txt +lordliness pg100.txt +lordling pg3200.txt +lords pg3200.txt, pg100.txt +lords! pg100.txt +lords, pg100.txt +lords- pg100.txt +lords. pg3200.txt, pg100.txt +lords; pg100.txt +lords? pg100.txt +lords?" pg3200.txt +lords?' pg100.txt +lordship pg3200.txt, pg100.txt +lordship! pg100.txt +lordship's pg3200.txt +lordship, pg100.txt +lordship. pg3200.txt, pg100.txt +lordship; pg100.txt +lordship? pg100.txt +lordships pg100.txt +lordships! pg100.txt +lordy, pg3200.txt +lore. pg3200.txt +lore; pg3200.txt +lorelei pg3200.txt +lorelei" pg3200.txt +lorelei." pg3200.txt +lorelei] pg3200.txt +lorenzo pg100.txt +lorenzo! pg100.txt +lorenzo, pg100.txt +lorenzo. pg100.txt +lorettes. pg3200.txt +lorne. pg3200.txt +lorraine. pg3200.txt +lorraine; pg100.txt +los pg100.txt +los'. pg3200.txt +los'." pg3200.txt +lose pg31100.txt, pg3200.txt, pg100.txt +lose, pg3200.txt, pg100.txt +lose. pg3200.txt, pg100.txt +lose." pg31100.txt +lose: pg100.txt +lose; pg100.txt +loser pg31100.txt, pg3200.txt +loser. pg100.txt +loser? pg100.txt +loses pg3200.txt, pg100.txt +losing pg31100.txt, pg3200.txt, pg100.txt +losing, pg31100.txt +losing. pg3200.txt, pg100.txt +loss pg31100.txt, pg3200.txt, pg100.txt +loss! pg100.txt +loss, pg3200.txt, pg100.txt +loss--a, pg3200.txt +loss. pg31100.txt, pg3200.txt, pg100.txt +loss." pg31100.txt +loss: pg100.txt +loss; pg3200.txt, pg100.txt +loss? pg100.txt +losses pg31100.txt +losses! pg100.txt +losses, pg31100.txt, pg100.txt +losses. pg100.txt +losses." pg3200.txt +losses; pg100.txt +lost pg31100.txt, pg3200.txt, pg100.txt +lost! pg100.txt +lost!" pg3200.txt +lost!' pg3200.txt +lost, pg31100.txt, pg3200.txt, pg100.txt +lost- pg100.txt +lost--lost pg3200.txt +lost--lost." pg3200.txt +lost--maybe pg3200.txt +lost--sinks pg3200.txt +lost. pg31100.txt, pg3200.txt, pg100.txt +lost." pg3200.txt +lost: pg3200.txt +lost; pg31100.txt, pg3200.txt, pg100.txt +lost? pg3200.txt, pg100.txt +lost?" pg3200.txt +lot pg31100.txt, pg3200.txt, pg100.txt +lot! pg3200.txt +lot!" pg3200.txt +lot, pg3200.txt, pg100.txt +lot--'" pg3200.txt +lot. pg3200.txt +lot." 
pg3200.txt +lot: pg3200.txt +lot; pg3200.txt, pg100.txt +lot? pg3200.txt +loth pg3200.txt +lothrop's." pg3200.txt +lotos pg3200.txt +lots pg3200.txt +lots. pg3200.txt +lott'ry; pg100.txt +lottery pg31100.txt, pg3200.txt +lottery. pg3200.txt +lottie. pg3200.txt +loud pg31100.txt, pg3200.txt, pg100.txt +loud'st. pg100.txt +loud, pg31100.txt, pg3200.txt, pg100.txt +loud--we, pg3200.txt +loud. pg31100.txt, pg3200.txt, pg100.txt +loud." pg3200.txt +loud: pg3200.txt +loud; pg3200.txt, pg100.txt +loud? pg100.txt +louder pg3200.txt, pg100.txt +louder! pg100.txt +louder, pg31100.txt, pg3200.txt +louder. pg100.txt +loudest, pg3200.txt +loudest. pg3200.txt +loudly pg3200.txt +loudly. pg3200.txt +louis pg3200.txt +louis, pg3200.txt +louis-- pg3200.txt +louis. pg3200.txt +louis.' pg3200.txt +louis: pg3200.txt +louis; pg3200.txt +louis?" pg3200.txt +louisa pg31100.txt, pg3200.txt +louisa! pg3200.txt +louisa!" pg31100.txt +louisa's pg31100.txt +louisa, pg31100.txt, pg3200.txt +louisa. pg31100.txt +louisa; pg31100.txt +louisa?" pg31100.txt +louise pg3200.txt +louise, pg3200.txt +louise,--not pg3200.txt +louise." pg3200.txt +louisiana, pg3200.txt +louisiana; pg3200.txt +louisville, pg3200.txt +lounge, pg3200.txt +loungers pg3200.txt +lounsbury, pg3200.txt +lounsbury. pg3200.txt +loup pg3200.txt +loup--and pg3200.txt +lour? pg100.txt +lourdement, pg3200.txt +lourdes pg3200.txt +louse-brat!' pg3200.txt +louse: pg100.txt +lousy. pg100.txt +lout, pg100.txt +lout. pg100.txt +louts pg100.txt +louvre, pg3200.txt +louvre. pg3200.txt, pg100.txt +louvre." pg3200.txt +lov'd pg100.txt +lov'd, pg100.txt +lov'd. pg100.txt +lov'd; pg100.txt +lov'd? pg100.txt +lov'st, pg100.txt +lov'st; pg100.txt +lov- pg3200.txt +lovable pg3200.txt +lovable, pg3200.txt +lovable--why, pg3200.txt +love pg31100.txt, pg3200.txt, pg100.txt +love! pg31100.txt, pg100.txt +love!" pg31100.txt +love!' pg3200.txt +love!--oh, pg3200.txt +love'. pg100.txt +love's pg100.txt +love) pg3200.txt, pg100.txt +love, pg31100.txt, pg3200.txt, pg100.txt +love,' pg100.txt +love- pg100.txt +love-- pg3200.txt +love---- pg3200.txt +love-bed, pg100.txt +love-discourse. pg100.txt +love-feast. pg3200.txt +love-in-idleness. pg100.txt +love-line. pg100.txt +love-making pg3200.txt +love-notes pg3200.txt +love-passages; pg3200.txt +love-prate. pg100.txt +love-quarrellings.] pg3200.txt +love-song pg31100.txt +love-song. pg100.txt +love. pg31100.txt, pg3200.txt, pg100.txt +love." pg31100.txt, pg3200.txt +love."' pg100.txt +love.' pg100.txt +love: pg100.txt +love; pg31100.txt, pg3200.txt, pg100.txt +love? pg3200.txt, pg100.txt +love?" pg31100.txt +love?' pg100.txt +loved pg31100.txt, pg3200.txt, pg100.txt +loved!" pg31100.txt +loved, pg31100.txt, pg3200.txt, pg100.txt +loved. pg31100.txt, pg3200.txt, pg100.txt +loved." pg31100.txt +loved; pg100.txt +loved? pg3200.txt +lovel pg100.txt +lovel, pg100.txt +lovel. pg100.txt +lovelane, pg3200.txt +lovelier pg3200.txt, pg100.txt +loveliest pg3200.txt +loveliest. pg3200.txt +loveliness pg31100.txt, pg3200.txt +loveliness, pg3200.txt +loveliness." pg3200.txt +lovell pg100.txt +lovell! pg100.txt +lovell, pg100.txt +lovell? pg100.txt +lovely pg31100.txt, pg3200.txt +lovely! pg3200.txt, pg100.txt +lovely"; pg3200.txt +lovely, pg31100.txt, pg3200.txt +lovely--how pg3200.txt +lovely. pg3200.txt, pg100.txt +lovely." pg3200.txt +lover pg31100.txt, pg3200.txt, pg100.txt +lover! pg31100.txt, pg100.txt +lover" pg3200.txt +lover's pg31100.txt +lover, pg100.txt +lover. pg31100.txt, pg100.txt +lover.' 
pg100.txt +lover: pg100.txt +lover; pg31100.txt, pg100.txt +lover? pg3200.txt, pg100.txt +lovered? pg100.txt +lovers pg31100.txt, pg3200.txt, pg100.txt +lovers! pg100.txt +lovers' pg31100.txt +lovers, pg3200.txt, pg100.txt +lovers. pg3200.txt, pg100.txt +lovers; pg3200.txt +lovers;--and pg31100.txt +loves pg31100.txt, pg3200.txt, pg100.txt +loves! pg100.txt +loves"; pg100.txt +loves, pg3200.txt, pg100.txt +loves. pg3200.txt, pg100.txt +loves." pg3200.txt +loves; pg100.txt +loves? pg100.txt +lovest, pg100.txt +loveth pg3200.txt +lovier's pg3200.txt +loving pg3200.txt +loving, pg3200.txt, pg100.txt +loving-cup. pg3200.txt +loving-kindness pg3200.txt +loving. pg100.txt +lovinger pg3200.txt +lovingly pg3200.txt +lovingly, pg3200.txt +lovingly. pg3200.txt +low pg31100.txt, pg3200.txt, pg100.txt +low! pg100.txt +low, pg31100.txt, pg3200.txt, pg100.txt +low," pg3200.txt +low--all pg3200.txt +low-down pg3200.txt +low-downest pg3200.txt +low-grade pg3200.txt +low-quarter pg3200.txt +low-spirited pg3200.txt +low-spirited. pg3200.txt +low-spiritedest pg3200.txt +low-voic'd. pg100.txt +low-voiced pg3200.txt +low. pg31100.txt, pg3200.txt, pg100.txt +low: pg3200.txt +low; pg31100.txt, pg3200.txt, pg100.txt +low? pg100.txt +lowdownest pg3200.txt +lowe, pg100.txt +lowell pg3200.txt +lower pg31100.txt, pg3200.txt +lower! pg100.txt +lower) pg31100.txt +lower, pg31100.txt, pg3200.txt, pg100.txt +lower. pg3200.txt, pg100.txt +lower." pg3200.txt +lower: pg100.txt +lower; pg3200.txt +lower?' pg3200.txt +lowered pg31100.txt, pg3200.txt +lowering pg31100.txt +lowest pg3200.txt +lowest. pg3200.txt +lowing pg3200.txt +lowland pg3200.txt +lowliness, pg100.txt +lowly pg31100.txt, pg3200.txt +lown. pg100.txt +lowness, pg100.txt +lowness. pg31100.txt, pg3200.txt +loyal pg3200.txt +loyal, pg100.txt +loyal. pg100.txt +loyalists pg3200.txt +loyalty pg3200.txt, pg100.txt +loyalty! pg100.txt +loyalty" pg3200.txt +loyalty, pg100.txt +loyalty- pg100.txt +loyalty. pg100.txt +loyalty: pg3200.txt, pg100.txt +loyalty? pg100.txt +loyseleur pg3200.txt +loyseleur! pg3200.txt +loyseleur, pg3200.txt +loyseleur. pg3200.txt +lt. pg3200.txt +lubber pg3200.txt +lubber; pg3200.txt +lubbock pg3200.txt +lucas pg31100.txt, pg3200.txt +lucas's." pg31100.txt +lucas, pg31100.txt +lucas. pg31100.txt +lucas." pg31100.txt +lucases pg31100.txt +lucases, pg31100.txt +luce pg100.txt +luce. pg100.txt +lucentio pg100.txt +lucentio, pg100.txt +lucentio- pg100.txt +lucentio. pg100.txt +lucentio? pg100.txt +lucerne pg3200.txt +lucerne,) pg3200.txt +lucerne. pg3200.txt +lucerne?" pg3200.txt +lucetta pg100.txt +lucetta! pg100.txt +lucetta. pg100.txt +luciana pg100.txt +lucianus. pg100.txt +lucifer pg3200.txt +lucifer, pg100.txt +lucifer; pg100.txt +lucilius pg100.txt +lucilius! pg100.txt +lucilius, pg100.txt +lucilius. pg100.txt +lucina; pg3200.txt +lucio pg100.txt +lucio, pg100.txt +lucio. pg100.txt +lucio? pg100.txt +lucius pg100.txt +lucius! pg100.txt +lucius, pg100.txt +lucius. pg3200.txt, pg100.txt +lucius; pg100.txt +lucius? pg100.txt +luck pg31100.txt, pg3200.txt, pg100.txt +luck! pg3200.txt +luck!" pg31100.txt +luck" pg3200.txt +luck, pg31100.txt, pg3200.txt, pg100.txt +luck,' pg3200.txt +luck-- pg3200.txt +luck--the pg3200.txt +luck. pg31100.txt, pg3200.txt, pg100.txt +luck." pg3200.txt +luck.' pg3200.txt +luck; pg3200.txt, pg100.txt +luck? pg3200.txt, pg100.txt +luck?" 
pg3200.txt +luckily pg31100.txt +luckily, pg31100.txt, pg100.txt +luckless pg3200.txt +lucknow pg3200.txt +lucknow--then pg3200.txt +lucky pg31100.txt, pg3200.txt +lucky!--for pg31100.txt +lucky, pg31100.txt, pg3200.txt +lucky," pg3200.txt +lucky--marrying pg31100.txt +lucky. pg3200.txt +lucky." pg31100.txt +lucky: pg3200.txt +lucky?" pg3200.txt +lucrative pg3200.txt +lucrative, pg3200.txt +lucrative. pg3200.txt +lucre pg100.txt +lucretia, pg3200.txt +lucullus pg100.txt +lucullus; pg100.txt +lucy pg31100.txt, pg3200.txt, pg100.txt +lucy, pg31100.txt, pg100.txt +lucy- pg100.txt +lucy. pg31100.txt +lucy." pg31100.txt +ludicrous pg3200.txt +ludicrous. pg3200.txt +ludicrous." pg3200.txt +ludlow, pg3200.txt +ludlow. pg100.txt +lueger pg3200.txt +lueger, pg3200.txt +lueger: pg3200.txt +lugg'd pg100.txt +luggage, pg3200.txt +luggage--one pg3200.txt +luggage. pg3200.txt +lugging pg3200.txt +lui pg3200.txt +lui, pg3200.txt +luigi pg3200.txt +luigi's pg3200.txt +luigi's, pg3200.txt +luigi, pg3200.txt +luigi-- pg3200.txt +luigi. pg3200.txt +luigi." pg3200.txt +luigi?" pg3200.txt +lukewarm pg3200.txt +lukewarmly, pg3200.txt +lull pg3200.txt +lullaby. pg100.txt +lulled pg3200.txt +lulu. pg3200.txt +lulu." pg3200.txt +lumbago pg3200.txt +lumber pg3200.txt +lumber-camps pg3200.txt +lumbered pg3200.txt +lumbering pg3200.txt +lumbering, pg3200.txt +lumbermen. pg3200.txt +luminous pg3200.txt +luminously pg3200.txt +lump pg3200.txt, pg100.txt +lump, pg100.txt +lump. pg3200.txt +lumps pg3200.txt +lumps, pg3200.txt +lunacies. pg3200.txt, pg100.txt +lunacy pg100.txt +lunacy!" pg3200.txt +lunacy. pg3200.txt, pg100.txt +lunacy? pg100.txt +lunatic pg3200.txt +lunatic! pg3200.txt +lunatic, pg3200.txt, pg100.txt +lunatic. pg3200.txt, pg100.txt +lunatic? pg100.txt +lunatic?" pg3200.txt +lunatics pg3200.txt +lunatics. pg3200.txt +lunch pg3200.txt +lunched. pg3200.txt +luncheon pg3200.txt +luncheon, pg3200.txt +luncheon. pg3200.txt +lunching pg3200.txt +lunes pg100.txt +lung, pg3200.txt +lungs pg31100.txt, pg3200.txt, pg100.txt +lungs. pg31100.txt, pg3200.txt +lupercal pg100.txt +lupercal. pg100.txt +lupus, pg3200.txt +lurch pg3200.txt +lure pg3200.txt +lure. pg100.txt +lured pg3200.txt +lurid pg3200.txt +lurid, pg3200.txt +lurk pg3200.txt, pg100.txt +lurk'd pg100.txt +lurk. pg100.txt +lurked pg3200.txt +lurking-place, pg100.txt +lurks pg3200.txt +luscious pg3200.txt +lust pg3200.txt, pg100.txt +lust, pg100.txt +lust. pg100.txt +lust; pg100.txt +lust? pg100.txt +luster! pg3200.txt +lustihood. pg100.txt +lustre pg31100.txt, pg3200.txt, pg100.txt +lustre, pg100.txt +lustre." pg31100.txt +lustre; pg100.txt +lustrous pg3200.txt +lustrous, pg3200.txt +lusts pg100.txt +lusts, pg100.txt +lusty; pg100.txt +lute pg100.txt +lute, pg31100.txt, pg100.txt +lute. pg100.txt +lute? pg100.txt +lutes. pg100.txt +luther pg3200.txt +lutterell pg31100.txt +lutterell. pg31100.txt +luvis pg31100.txt +luxuriant pg31100.txt, pg3200.txt +luxuriate pg3200.txt +luxuries pg3200.txt +luxuries, pg3200.txt +luxurious pg31100.txt, pg3200.txt +luxurious, pg3200.txt +luxury pg31100.txt, pg3200.txt, pg100.txt +luxury! pg100.txt +luxury, pg3200.txt, pg100.txt +luxury--such pg3200.txt +luxury. pg3200.txt +luxury." pg3200.txt +lv pg3200.txt +lv. pg3200.txt +lvi. pg3200.txt +lvii. pg3200.txt +lviii. pg3200.txt +lx. pg3200.txt +lxi. pg3200.txt +lxii pg3200.txt +lxii. pg3200.txt +lxiii. pg3200.txt +lxiv. pg3200.txt +lxix. pg3200.txt +lxv. pg3200.txt +lxvi. pg3200.txt +lxvii. pg3200.txt +lxviii. pg3200.txt +lxvil pg3200.txt +lxx. pg3200.txt +lxxi. 
pg3200.txt +lxxii. pg3200.txt +lxxiii. pg3200.txt +lxxiv. pg3200.txt +lxxiv.] pg3200.txt +lxxix. pg3200.txt +lxxv. pg3200.txt +lxxvi. pg3200.txt +lxxvii. pg3200.txt +lxxviii. pg3200.txt +lxxx pg3200.txt +lycaonia, pg100.txt +lyceum pg3200.txt +lydia pg31100.txt +lydia!" pg31100.txt +lydia's pg31100.txt +lydia, pg31100.txt, pg100.txt +lydia," pg31100.txt +lydia--the pg31100.txt +lydia. pg31100.txt +lydia." pg31100.txt +lyin'!" pg3200.txt +lying pg3200.txt +lying! pg100.txt +lying, pg3200.txt +lying. pg3200.txt +lying; pg100.txt +lying? pg3200.txt +lyly), pg3200.txt +lym, pg100.txt +lyman, pg3200.txt +lyme pg31100.txt +lyme, pg31100.txt +lyme. pg31100.txt +lyme." pg31100.txt +lyme; pg31100.txt +lynch pg3200.txt +lynch, pg3200.txt +lynch.' pg3200.txt +lynched. pg3200.txt +lynches, pg3200.txt +lysander pg100.txt +lysander! pg100.txt +lysander, pg100.txt +lysander. pg100.txt +lysander? pg100.txt +lyttelton. pg3200.txt +m'am." pg3200.txt +m'avoir pg3200.txt +m'kays, pg31100.txt +m'kenrie pg31100.txt +m'lord?" pg3200.txt +m'phersons, pg31100.txt +m--" pg3200.txt +m--- pg3200.txt +m----: pg3200.txt +m---. pg31100.txt +m-y pg3200.txt +m-y--g-o-l-d-e-n--arm?" pg3200.txt +m. pg3200.txt +m." pg3200.txt +m., pg3200.txt +m.---- pg3200.txt +m.--gibraltar. pg3200.txt +m.?" pg3200.txt +m.d." pg31100.txt +m.t. pg3200.txt +m.t." pg3200.txt +m.t.] pg3200.txt +m.t.]--i pg3200.txt +m.t.]} pg3200.txt +ma! pg3200.txt +ma'am pg31100.txt +ma'am!" pg31100.txt +ma'am, pg31100.txt +ma'am," pg31100.txt +ma'am. pg31100.txt +ma'am." pg31100.txt +ma'am? pg31100.txt +ma'am?" pg31100.txt +ma'm." pg3200.txt +ma'm?" pg3200.txt +ma's pg3200.txt +ma--there's pg3200.txt +ma." pg3200.txt +ma., pg3200.txt +ma?" pg3200.txt +mab pg31100.txt, pg100.txt +mabie pg3200.txt +mabie. pg3200.txt +mabille. pg3200.txt +mabry pg3200.txt +mabry, pg3200.txt +macalister. pg3200.txt +macaroni pg3200.txt +macaulay: pg3200.txt +macaw; pg3200.txt +macbeth pg31100.txt, pg100.txt +macbeth! pg100.txt +macbeth, pg31100.txt, pg100.txt +macbeth. pg100.txt +maccabaeus. pg100.txt +maccaroni-stuffing pg3200.txt +macdonald pg31100.txt +macdonald, pg31100.txt +macdonald. pg31100.txt +macdonwald- pg100.txt +macduff pg100.txt +macduff, pg100.txt +macduff. pg100.txt +macduff? pg100.txt +mace pg3200.txt +macedon, pg100.txt +maceration pg3200.txt +machetes pg3200.txt +machiavel! pg100.txt +machiavelli, pg3200.txt +machine pg3200.txt +machine), pg3200.txt +machine, pg3200.txt +machine-made pg3200.txt +machine. pg3200.txt +machine? pg3200.txt +machine?" pg3200.txt +machinery pg3200.txt +machinery, pg3200.txt +machinery--and pg3200.txt +machinery. pg3200.txt +machines pg3200.txt +machines, pg3200.txt +machines. pg3200.txt +machines; pg3200.txt +machoire pg3200.txt +mack'rel. pg100.txt +macken--" pg3200.txt +mackenzie. pg3200.txt +mackerel pg3200.txt +mackerel." pg3200.txt +macneill, pg3200.txt +macon. pg3200.txt +macveagh pg3200.txt +macveigh; pg3200.txt +mad! pg100.txt +mad!" pg3200.txt +mad, pg3200.txt, pg100.txt +mad- pg100.txt +mad--mad pg3200.txt +mad-man, pg3200.txt +mad. pg3200.txt, pg100.txt +mad." pg3200.txt +mad; pg3200.txt, pg100.txt +mad? pg3200.txt, pg100.txt +madagascar, pg3200.txt +madagascar. pg3200.txt +madam pg31100.txt, pg3200.txt, pg100.txt +madam! pg100.txt +madam!" pg3200.txt +madam'--" pg3200.txt +madam, pg31100.txt, pg3200.txt, pg100.txt +madam,' pg31100.txt +madam- pg100.txt +madam-' pg100.txt +madam. pg3200.txt, pg100.txt +madam." pg31100.txt, pg3200.txt +madam: pg100.txt +madam; pg100.txt +madam? pg100.txt +madam?" 
pg31100.txt, pg3200.txt +madame pg31100.txt, pg3200.txt +madame! pg100.txt +madame-- pg3200.txt +madame-----, pg3200.txt +madame. pg3200.txt, pg100.txt +madcap. pg100.txt +madded. pg100.txt +madden pg3200.txt +maddening pg3200.txt +maddening) pg3200.txt +madder pg3200.txt +maddest pg3200.txt +maddison pg31100.txt +made! pg3200.txt, pg100.txt +made!" pg3200.txt +made) pg100.txt +made, pg31100.txt, pg3200.txt, pg100.txt +made--and pg3200.txt +made--every pg3200.txt +made--therefore, pg3200.txt +made--when pg31100.txt +made. pg31100.txt, pg3200.txt, pg100.txt +made." pg31100.txt, pg3200.txt +made.) pg3200.txt +made: pg3200.txt, pg100.txt +made; pg31100.txt, pg3200.txt, pg100.txt +made? pg3200.txt, pg100.txt +made?" pg3200.txt +madeira pg31100.txt +madeira, pg3200.txt +madeiras pg3200.txt +madeiras. pg3200.txt +madly pg31100.txt, pg3200.txt +madman pg3200.txt +madman! pg100.txt +madman!" pg3200.txt +madman's pg100.txt +madman, pg3200.txt +madman--hacked pg3200.txt +madman. pg3200.txt, pg100.txt +madman? pg100.txt +madman]. pg100.txt +madmen pg100.txt +madmen. pg100.txt +madness pg31100.txt, pg3200.txt, pg100.txt +madness! pg100.txt +madness!" pg31100.txt +madness, pg31100.txt, pg100.txt +madness. pg100.txt +madness." pg31100.txt +madness; pg100.txt +madness? pg100.txt +madonna--a pg3200.txt +madonna. pg100.txt +madras pg3200.txt +madras-bombay,.................650 pg3200.txt +madras; pg3200.txt +madrid pg3200.txt +madrigals- pg100.txt +madrigals; pg100.txt +maecenas pg100.txt +maecenas. pg100.txt +mag. pg3200.txt +magazin--" pg3200.txt +magazine pg3200.txt +magazine, pg3200.txt +magazine--it pg3200.txt +magazine-writing pg3200.txt +magazine. pg3200.txt +magazines pg3200.txt +magazines, pg3200.txt +magazines. pg3200.txt +magdala. pg3200.txt +magdalen pg3200.txt +magdalen; pg3200.txt +magdela pg3200.txt +magellan pg3200.txt +maggiore, pg3200.txt +magic pg3200.txt, pg100.txt +magic- pg100.txt +magic." pg3200.txt +magician pg3200.txt +magician's pg3200.txt +magician, pg100.txt +magician. pg3200.txt +magicians pg3200.txt +magistrate pg31100.txt, pg3200.txt, pg100.txt +magistrate's pg3200.txt +magistrate; pg100.txt +magistrates, pg100.txt +magistrates. pg100.txt +magna pg31100.txt +magna. pg31100.txt +magnan.' pg3200.txt +magnanimity pg3200.txt, pg100.txt +magnanimity, pg3200.txt +magnanimity. pg3200.txt +magnanimity; pg3200.txt +magnanimous pg3200.txt +magnanimous, pg100.txt +magnanimous. pg3200.txt +magnate pg3200.txt +magnate, pg3200.txt +magnates pg3200.txt +magnetic pg3200.txt +magnificat. pg3200.txt +magnificence pg3200.txt +magnificence--"women pg3200.txt +magnificence--fragments pg3200.txt +magnificence." pg3200.txt +magnificent pg31100.txt, pg3200.txt +magnificent! pg3200.txt, pg100.txt +magnificent!" pg3200.txt +magnificent--the pg3200.txt +magnificent. pg3200.txt +magnificent." pg3200.txt +magnificently pg3200.txt +magnificently. pg3200.txt +magnificently." pg3200.txt +magnificoes pg100.txt +magnified pg31100.txt, pg3200.txt +magnify pg3200.txt +magnitude pg3200.txt +magnitude, pg3200.txt +magnitude; pg3200.txt +magnolia?" pg3200.txt +magpie pg3200.txt +magyar-tale. pg3200.txt +mahmoud pg3200.txt +mahogany pg3200.txt +mahogany. pg3200.txt +mahomet pg3200.txt +mahomet, pg3200.txt +mahoney. pg3200.txt +mahout pg3200.txt +mahu, pg100.txt +mahu. pg100.txt +maid pg31100.txt, pg3200.txt, pg100.txt +maid! pg100.txt +maid, pg31100.txt, pg3200.txt, pg100.txt +maid- pg100.txt +maid. pg31100.txt, pg3200.txt, pg100.txt +maid." pg31100.txt, pg3200.txt +maid.' 
pg100.txt +maid.- pg100.txt +maid; pg100.txt +maid;" pg3200.txt +maid? pg100.txt +maidan; pg3200.txt +maiden pg3200.txt +maiden's pg3200.txt +maiden, pg100.txt +maiden-widowed. pg100.txt +maiden; pg100.txt +maiden? pg3200.txt +maidenhead pg100.txt +maidenhead! pg100.txt +maidenhead, pg100.txt +maidenhead; pg100.txt +maidenhead? pg100.txt +maidenheads. pg100.txt +maidenheads? pg100.txt +maidenhedde" pg3200.txt +maidenheddes pg3200.txt +maidenhood pg100.txt +maidenhoods. pg100.txt +maidenly; pg100.txt +maidens, pg3200.txt +maidhood pg100.txt +maids pg3200.txt, pg100.txt +maids, pg31100.txt, pg100.txt +maids. pg3200.txt, pg100.txt +maids.' pg100.txt +maids; pg3200.txt +maids? pg100.txt +mail pg31100.txt, pg3200.txt, pg100.txt +mail, pg3200.txt +mail--immediately. pg3200.txt +mail--these pg3200.txt +mail-bag pg3200.txt +mail-bag. pg3200.txt +mail-bags pg3200.txt +mail-bags, pg3200.txt +mail-clad pg3200.txt +mail-pockets pg3200.txt +mail-sack pg3200.txt +mail-sacks. pg3200.txt +mail. pg3200.txt +mail." pg3200.txt +mailed pg3200.txt +mailed): pg3200.txt +mailed; pg3200.txt +mails pg3200.txt +mails- pg100.txt +mails. pg3200.txt +maim pg100.txt +maim. pg100.txt +maims pg100.txt +maimuna-- pg3200.txt +main pg3200.txt, pg100.txt +main!" pg3200.txt +main' pg3200.txt +main, pg3200.txt, pg100.txt +main. pg3200.txt, pg100.txt +main.' pg3200.txt +maine pg100.txt +maine!'" pg3200.txt +maine, pg100.txt +maine. pg3200.txt, pg100.txt +maine: pg3200.txt +mainland--yes, pg3200.txt +mainly pg31100.txt, pg3200.txt +mainly, pg3200.txt +mainly. pg3200.txt +mainly." pg3200.txt +mainmast. pg31100.txt +mainsail, pg3200.txt +mainsail. pg3200.txt +maintain pg3200.txt, pg100.txt +maintain'd, pg100.txt +maintain, pg31100.txt, pg100.txt +maintain. pg31100.txt, pg100.txt +maintained pg31100.txt, pg3200.txt +maintained, pg31100.txt, pg3200.txt +maintained; pg31100.txt +maintaining pg31100.txt, pg3200.txt +maintains pg3200.txt +maintenance pg3200.txt +maintenance. pg31100.txt +maintenance? pg31100.txt +maintenon, pg3200.txt +mainwaring pg31100.txt +mainwaring's pg31100.txt +mainwaring. pg31100.txt +mainwarings; pg31100.txt +mais pg3200.txt +maisty. pg3200.txt +maize; pg3200.txt +majestic pg31100.txt, pg3200.txt +majestic, pg3200.txt +majestical pg100.txt +majestical, pg100.txt +majestically, pg100.txt +majesties pg100.txt +majesty pg31100.txt, pg3200.txt, pg100.txt +majesty! pg100.txt +majesty!" pg3200.txt +majesty!' pg100.txt +majesty'! pg100.txt +majesty's pg3200.txt, pg100.txt +majesty's. pg100.txt +majesty, pg31100.txt, pg3200.txt, pg100.txt +majesty," pg3200.txt +majesty- pg100.txt +majesty--" pg3200.txt +majesty. pg3200.txt, pg100.txt +majesty." pg3200.txt +majesty: pg3200.txt, pg100.txt +majesty; pg3200.txt, pg100.txt +majesty? pg3200.txt, pg100.txt +major pg3200.txt +major's. pg3200.txt +major, pg3200.txt +major-general pg3200.txt +major; pg3200.txt +majorca), pg3200.txt +majorities: pg3200.txt +majority pg3200.txt, pg100.txt +majority!" pg3200.txt +majority, pg3200.txt +majority. pg3200.txt +majority." pg3200.txt +mak'st pg100.txt +make! pg3200.txt, pg100.txt +make's pg100.txt +make, pg31100.txt, pg3200.txt, pg100.txt +make--a pg3200.txt +make-up pg3200.txt +make-up, pg3200.txt +make. pg31100.txt, pg3200.txt, pg100.txt +make." pg31100.txt, pg3200.txt +make.--she pg31100.txt +make; pg31100.txt, pg3200.txt, pg100.txt +make? pg3200.txt +make?" pg3200.txt +maker pg100.txt +maker. pg3200.txt, pg100.txt +makers. pg3200.txt +makes pg31100.txt, pg3200.txt, pg100.txt +makes!" pg100.txt +makes, pg100.txt +makes. 
pg3200.txt, pg100.txt +makes; pg100.txt +makest pg3200.txt +maketh pg3200.txt +makin' pg3200.txt +making pg31100.txt, pg3200.txt, pg100.txt +making, pg100.txt +making. pg3200.txt, pg100.txt +making: pg3200.txt +malabat pg3200.txt +malade pg3200.txt +maladies. pg100.txt +malady pg31100.txt, pg3200.txt, pg100.txt +malady, pg31100.txt, pg100.txt +malady--epilepsy. pg3200.txt +malady. pg31100.txt, pg3200.txt, pg100.txt +malaga pg3200.txt +malaga, pg3200.txt +malapert. pg100.txt +malapert; pg100.txt +malaria pg3200.txt +malcolm, pg100.txt +malcolm? pg100.txt +malcontent? pg100.txt +malcontents, pg100.txt +male pg3200.txt, pg100.txt +male. pg3200.txt, pg100.txt +maledictions pg3200.txt +maledisant, pg3200.txt +malefactions; pg100.txt +malefactors? pg100.txt +males pg3200.txt +males, pg3200.txt +males. pg3200.txt +malevolence." pg31100.txt +malice pg31100.txt, pg3200.txt, pg100.txt +malice, pg3200.txt, pg100.txt +malice- pg100.txt +malice--at pg31100.txt +malice. pg100.txt +malice; pg100.txt +malice? pg100.txt +malicious pg31100.txt, pg3200.txt +malicious, pg100.txt +malicious--and pg3200.txt +malicious." pg3200.txt +maliciousest pg3200.txt +maliciously pg3200.txt +maliciously. pg100.txt +malignant pg3200.txt +malignant, pg3200.txt +malignantly pg3200.txt +malignity, pg3200.txt +malignity: pg3200.txt +malignity; pg3200.txt +mallard, pg100.txt +mallet. pg100.txt +mallows. pg100.txt +malmsey-butt pg100.txt +malory, pg3200.txt +malos, pg3200.txt +malos. pg3200.txt +malta pg3200.txt +malta--quarantine; pg3200.txt +malta-gibraltar,.............1,008 pg3200.txt +malta. pg3200.txt +maltreatment pg3200.txt +maltster, pg3200.txt +maltworms; pg100.txt +malvolio pg100.txt +malvolio! pg100.txt +malvolio' pg100.txt +malvolio's pg100.txt +malvolio. pg100.txt +malvolio? pg100.txt +mama, pg31100.txt +mama. pg31100.txt +mamillius pg100.txt +mamillius, pg100.txt +mamma pg31100.txt, pg3200.txt +mamma's pg31100.txt +mamma, pg31100.txt, pg3200.txt +mamma--such pg3200.txt +mamma. pg31100.txt +mamma." pg3200.txt +mamma? pg31100.txt +mamma?" pg3200.txt +mammal, pg3200.txt +mammas pg3200.txt +mammies pg3200.txt +mammoth pg3200.txt +mammy pg3200.txt +mammy?" pg3200.txt +man! pg31100.txt, pg3200.txt, pg100.txt +man!" pg31100.txt, pg3200.txt, pg100.txt +man!' pg3200.txt +man!--that pg31100.txt +man" pg3200.txt +man'? pg100.txt +man's pg31100.txt, pg3200.txt, pg100.txt +man's, pg3200.txt +man's--is pg3200.txt +man's. pg3200.txt, pg100.txt +man's? pg3200.txt, pg100.txt +man), pg31100.txt +man, pg31100.txt, pg3200.txt, pg100.txt +man," pg3200.txt +man- pg100.txt +man-- pg3200.txt +man--' pg3200.txt +man--'" pg3200.txt +man--a pg3200.txt +man--and pg3200.txt +man--charge!" pg3200.txt +man--dat's pg3200.txt +man--enterprise. pg3200.txt +man--got pg3200.txt +man--he pg31100.txt +man--infinitely pg3200.txt +man--proposed pg3200.txt +man--quite pg31100.txt +man--storm pg3200.txt +man--the pg31100.txt, pg3200.txt +man--though pg3200.txt +man-at-arms pg100.txt +man-at-arms--" pg3200.txt +man-at-arms. pg100.txt +man-child pg100.txt +man-factory-- pg3200.txt +man-of-war pg3200.txt +man-of-war's pg3200.txt +man-of-war, pg3200.txt +man-of-war. pg31100.txt, pg3200.txt +man-servant, pg3200.txt +man-servant--a pg3200.txt +man. pg31100.txt, pg3200.txt, pg100.txt +man." pg31100.txt, pg3200.txt +man.' pg3200.txt, pg100.txt +man: pg31100.txt, pg100.txt +man; pg31100.txt, pg3200.txt, pg100.txt +man? pg31100.txt, pg3200.txt, pg100.txt +man?" pg31100.txt, pg3200.txt +manacles pg100.txt +manacles, pg100.txt +manage pg31100.txt, pg3200.txt +manage. 
pg3200.txt +manage." pg31100.txt +manage?' pg3200.txt +managed pg3200.txt +managed, pg3200.txt +managed. pg31100.txt, pg3200.txt, pg100.txt +managed." pg3200.txt +managed; pg100.txt +managed?" pg31100.txt +management pg3200.txt +management--with pg3200.txt +management. pg3200.txt +management." pg3200.txt +manager pg3200.txt +manager, pg3200.txt +manager. pg31100.txt, pg3200.txt +manager." pg31100.txt +manager.' pg3200.txt +manager?" pg3200.txt +managers. pg3200.txt +manages pg3200.txt +managing pg31100.txt, pg3200.txt, pg100.txt +manchester-street--was pg31100.txt +manchon pg3200.txt +manchon. pg3200.txt +mandalay," pg3200.txt +mandate, pg100.txt +mandeville pg3200.txt +mandragora, pg100.txt +mandragora. pg100.txt +mane pg3200.txt +mane, pg100.txt +mane--france pg3200.txt +manes pg3200.txt +maneuvers pg3200.txt +manfully pg3200.txt +manfully--lost pg3200.txt +manfully. pg3200.txt +mangled pg3200.txt +mangy pg3200.txt +mangy, pg3200.txt +manhood pg3200.txt +manhood, pg3200.txt, pg100.txt +manhood. pg100.txt +manhood; pg3200.txt +mania pg3200.txt +maniac pg3200.txt +maniac. pg3200.txt +maniac; pg3200.txt +maniac? pg3200.txt +maniacs. pg3200.txt +manifest pg3200.txt, pg100.txt +manifest, pg3200.txt +manifest." pg3200.txt +manifest?" pg3200.txt +manifested pg3200.txt, pg100.txt +manifested, pg100.txt +manifested. pg100.txt +manifesting pg3200.txt +manifestly pg3200.txt +manifesto pg3200.txt +manifesto. pg3200.txt +manifold pg3200.txt, pg100.txt +manifold, pg100.txt +manifoldly pg100.txt +manikin?" pg3200.txt +manipulated pg3200.txt +manipulating pg3200.txt +manito pg3200.txt +manito. pg3200.txt +manito; pg3200.txt +manitoes. pg3200.txt +manivet. pg3200.txt +mankind pg31100.txt, pg3200.txt, pg100.txt +mankind! pg100.txt +mankind, pg31100.txt, pg3200.txt +mankind. pg31100.txt, pg3200.txt, pg100.txt +mankind." pg3200.txt +mankind? pg100.txt +manlier pg3200.txt +manlike pg100.txt +manly pg3200.txt +manly. pg100.txt +mann'd pg100.txt +mann'd; pg100.txt +manned pg3200.txt +manner pg31100.txt, pg3200.txt, pg100.txt +manner!" pg31100.txt +manner, pg31100.txt, pg3200.txt +manner," pg31100.txt +manner- pg100.txt +manner-- pg3200.txt +manner--this pg31100.txt +manner--would pg31100.txt +manner. pg31100.txt, pg3200.txt, pg100.txt +manner." pg31100.txt +manner: pg3200.txt +manner:-- pg31100.txt, pg3200.txt +manner; pg31100.txt, pg3200.txt +manner? pg100.txt +manner?" pg31100.txt, pg3200.txt +manner?--not pg3200.txt +mannering--i pg3200.txt +mannerless pg3200.txt +mannerly pg100.txt +mannerly. pg100.txt +manners pg31100.txt, pg3200.txt, pg100.txt +manners!--so pg31100.txt +manners, pg31100.txt, pg3200.txt, pg100.txt +manners. pg31100.txt, pg3200.txt, pg100.txt +manners." pg31100.txt, pg3200.txt +manners; pg31100.txt, pg3200.txt, pg100.txt +manners] pg3200.txt +mannheim pg3200.txt +mannheim, pg3200.txt +mannikin!" pg3200.txt +manoeuvre pg31100.txt +manoeuvres, pg3200.txt +manologists, pg3200.txt +manor pg100.txt +manor-house pg3200.txt +manor-house, pg3200.txt +manor. pg31100.txt +manor?" pg3200.txt +manquait pg3200.txt +mansfield pg31100.txt +mansfield, pg31100.txt, pg3200.txt +mansfield. pg31100.txt +mansfield." pg31100.txt +mansfield; pg31100.txt +mansion pg3200.txt, pg100.txt +mansion" pg3200.txt +mansion, pg31100.txt, pg3200.txt +mansion-and- pg3200.txt +mansion-house. pg31100.txt +mansion. pg31100.txt, pg3200.txt +mansion; pg31100.txt +mansions pg3200.txt +mansions, pg3200.txt +mantel pg3200.txt +mantel, pg3200.txt +mantel-clock. 
pg3200.txt +mantel-piece pg31100.txt +mantel; pg3200.txt +mantelpiece pg31100.txt +mantelpiece, pg31100.txt, pg3200.txt +mantelpiece. pg31100.txt +mantle pg100.txt +mantle, pg100.txt +mantle] pg100.txt +mantles pg3200.txt +mantua pg100.txt +mantua, pg100.txt +mantua. pg100.txt +manual pg3200.txt +manual, pg3200.txt +manuel pg3200.txt +manufacture pg31100.txt, pg3200.txt +manufacture. pg3200.txt +manufactured pg3200.txt +manufacturers pg3200.txt +manufacturers, pg3200.txt +manufactures pg3200.txt +manufactures. pg3200.txt +manure-pile pg3200.txt +manure-piles pg3200.txt +manure. pg3200.txt +manure?" pg3200.txt +manured pg100.txt +manus. pg100.txt +manuscript pg3200.txt +manuscript--which pg3200.txt +manuscript. pg3200.txt +manuscript: pg3200.txt +manuscript; pg31100.txt +manuscript? pg3200.txt +manuscripts pg3200.txt +manuscripts. pg3200.txt +many pg31100.txt, pg3200.txt, pg100.txt +many!'" pg3200.txt +many's pg3200.txt +many, pg31100.txt, pg3200.txt, pg100.txt +many- pg3200.txt, pg100.txt +many-centuries-later pg3200.txt +many-colored pg3200.txt +many. pg31100.txt, pg3200.txt, pg100.txt +many." pg3200.txt +many; pg31100.txt, pg3200.txt +many?" pg31100.txt, pg3200.txt +many?' pg3200.txt +maori pg3200.txt +maoris pg3200.txt +maow." pg3200.txt +map pg3200.txt, pg100.txt +map, pg3200.txt +map--" pg3200.txt +map. pg3200.txt, pg100.txt +map." pg3200.txt +map; pg3200.txt +map?" pg3200.txt +mapes pg3200.txt +maple pg31100.txt +mapping pg3200.txt +maps pg3200.txt +maps, pg3200.txt +maps. pg3200.txt +maps; pg3200.txt +mar pg3200.txt, pg100.txt +mar. pg100.txt +marauders. pg3200.txt +marauding pg31100.txt +marble pg3200.txt +marble! pg3200.txt +marble, pg31100.txt, pg3200.txt, pg100.txt +marble- pg3200.txt +marble--5. pg3200.txt +marble--precious pg3200.txt +marble-topped pg3200.txt +marble. pg3200.txt, pg100.txt +marble; pg3200.txt +marbled pg3200.txt +marbles pg3200.txt +marbles, pg3200.txt +marcade pg100.txt +marcade; pg100.txt +marcellus! pg100.txt +marcellus, pg100.txt +marcellus. pg100.txt +marcellus? pg100.txt +march pg31100.txt, pg3200.txt, pg100.txt +march!" pg3200.txt +march!--eastward, pg3200.txt +march!--on pg3200.txt +march, pg3200.txt, pg100.txt +march--check. pg3200.txt +march. pg31100.txt, pg3200.txt, pg100.txt +march." pg3200.txt +march: pg100.txt +march; pg3200.txt, pg100.txt +march? pg100.txt +march] pg100.txt +marched pg3200.txt +marched, pg3200.txt +marches pg3200.txt +marches, pg3200.txt +marches." pg3200.txt +marching pg3200.txt, pg100.txt +marching, pg3200.txt +marching. pg100.txt +marching." pg3200.txt +marcius pg100.txt +marcius! pg100.txt +marcius!' pg100.txt +marcius, pg100.txt +marcius. pg100.txt +marcius; pg100.txt +marcius? pg100.txt +marco pg3200.txt +marco's: pg3200.txt +marco, pg3200.txt +marcos pg3200.txt +marcus pg3200.txt, pg100.txt +marcus, pg100.txt +marcus. pg100.txt +mardian pg100.txt +mardian! pg100.txt +mardian, pg100.txt +mardian. pg100.txt +mardian] pg100.txt +mare pg31100.txt, pg3200.txt +mare, pg100.txt +mare. pg100.txt +marengo, pg3200.txt +marengo. pg3200.txt +margarelon pg100.txt +margaret pg31100.txt, pg3200.txt, pg100.txt +margaret! pg100.txt +margaret's pg31100.txt +margaret's. pg100.txt +margaret, pg100.txt +margaret-- pg31100.txt +margaret. pg3200.txt, pg100.txt +margaret." pg3200.txt +margaret: pg100.txt +margaret? pg100.txt +margaret?" pg3200.txt +margaretta; pg3200.txt +margent pg100.txt +margery, pg3200.txt, pg100.txt +marget pg3200.txt +marget's pg3200.txt +marget, pg3200.txt +marget. 
pg3200.txt +margin pg3200.txt +margin; pg3200.txt +margin?" pg3200.txt +marginal pg3200.txt +marguerite." pg3200.txt +marguy pg3200.txt +marguy; pg3200.txt +marhaus pg3200.txt +marhaus. pg3200.txt +maria pg31100.txt, pg3200.txt, pg100.txt +maria! pg100.txt +maria" pg31100.txt +maria's pg31100.txt +maria, pg31100.txt, pg3200.txt, pg100.txt +maria--" pg3200.txt +maria. pg31100.txt +maria." pg31100.txt +maria? pg100.txt +mariana pg100.txt +mariana. pg100.txt +marianne pg31100.txt +marianne!" pg31100.txt +marianne's pg31100.txt +marianne's, pg31100.txt +marianne's. pg31100.txt +marianne's; pg31100.txt +marianne, pg31100.txt +marianne--he pg31100.txt +marianne--still pg31100.txt +marianne. pg31100.txt +marianne." pg31100.txt +marianne; pg31100.txt +marianne?" pg31100.txt +marianne_, pg31100.txt +marie pg3200.txt +marie. pg3200.txt +marimana, pg3200.txt +marine pg3200.txt +marine. pg3200.txt +marinel, pg3200.txt +mariner pg3200.txt, pg100.txt +mariner's pg3200.txt +mariner, pg3200.txt +mariner. pg3200.txt +mariners pg100.txt +mariners.) pg3200.txt +marion pg3200.txt +maritime pg100.txt +marjoram. pg100.txt +marjoram; pg100.txt +marjoribanks pg3200.txt +mark pg31100.txt, pg3200.txt, pg100.txt +mark! pg3200.txt +mark!" pg3200.txt +mark!- pg100.txt +mark" pg3200.txt +mark'd pg100.txt +mark'd, pg100.txt +mark's pg3200.txt +mark's. pg3200.txt +mark, pg3200.txt, pg100.txt +mark. pg3200.txt, pg100.txt +mark." pg3200.txt +mark: pg100.txt +mark; pg100.txt +mark] pg100.txt +marked pg31100.txt, pg3200.txt, pg100.txt +marked, pg3200.txt +marked. pg3200.txt +markedly pg31100.txt +market pg3200.txt, pg100.txt +market, pg3200.txt +market--but pg3200.txt +market--nobody pg3200.txt +market-men pg100.txt +market-place pg100.txt +market-place! pg100.txt +market-place, pg100.txt +market-place. pg100.txt +market-place; pg100.txt +market-square, pg3200.txt +market-square. pg3200.txt +market-town pg31100.txt +market. pg3200.txt, pg100.txt +market." pg3200.txt +market: pg3200.txt +marketable. pg3200.txt, pg100.txt +marketable." pg3200.txt +marketed pg3200.txt +marketplace, pg100.txt +markets pg3200.txt +markets, pg3200.txt +markets. pg3200.txt, pg100.txt +marking pg31100.txt, pg3200.txt, pg100.txt +marking, pg100.txt +marking. pg3200.txt +markiss." pg3200.txt +marks pg31100.txt, pg3200.txt, pg100.txt +marks!" pg3200.txt +marks, pg3200.txt, pg100.txt +marks. pg3200.txt, pg100.txt +marks; pg100.txt +marks? pg100.txt +marks?" pg3200.txt +marksmanship, pg3200.txt +marl? pg100.txt +marlborough." pg31100.txt +marlette. pg3200.txt +marlitt. pg3200.txt +marlow, pg3200.txt +marlow." pg3200.txt +marlow?" pg3200.txt +marlowe pg31100.txt +marlowe. pg31100.txt +marlowes pg31100.txt +marmaduke pg3200.txt +marmalade pg31100.txt +marmora pg3200.txt +maronites, pg3200.txt +marquette pg3200.txt +marquis pg100.txt +marquis. pg100.txt +marquises; pg3200.txt +marr'd, pg100.txt +marr'd. pg100.txt +marred pg3200.txt +marred, pg3200.txt +marriage pg31100.txt, pg3200.txt, pg100.txt +marriage! pg100.txt +marriage, pg31100.txt, pg3200.txt, pg100.txt +marriage--and pg3200.txt +marriage-bed pg100.txt +marriage-bed, pg100.txt +marriage-bed. pg100.txt +marriage-blessing, pg100.txt +marriage-day. pg100.txt +marriage-hour, pg100.txt +marriage. pg31100.txt, pg3200.txt, pg100.txt +marriage." pg31100.txt +marriage.' pg31100.txt +marriage; pg31100.txt, pg3200.txt, pg100.txt +marriage? pg31100.txt, pg100.txt +marriage?" pg31100.txt, pg3200.txt +marriages, pg31100.txt, pg3200.txt, pg100.txt +married pg31100.txt, pg3200.txt, pg100.txt +married!" 
pg31100.txt +married" pg31100.txt +married, pg31100.txt, pg3200.txt, pg100.txt +married," pg31100.txt +married--married pg31100.txt +married. pg31100.txt, pg3200.txt, pg100.txt +married." pg31100.txt, pg3200.txt +married.' pg3200.txt +married._"] pg31100.txt +married; pg31100.txt, pg3200.txt, pg100.txt +married? pg100.txt +married?" pg31100.txt, pg3200.txt +marries pg3200.txt +marries, pg3200.txt +marrow pg3200.txt +marrow, pg3200.txt +marrow-bones, pg3200.txt +marrow. pg3200.txt +marry pg31100.txt, pg3200.txt, pg100.txt +marry! pg100.txt +marry, pg31100.txt, pg3200.txt, pg100.txt +marry--and pg31100.txt +marry. pg31100.txt, pg100.txt +marry." pg31100.txt +marry; pg31100.txt +marry;--and pg31100.txt +marry? pg31100.txt +marry?--he pg31100.txt +marrying pg31100.txt +marrying)--the pg31100.txt +marrying, pg31100.txt, pg100.txt +marrying. pg100.txt +marrying." pg31100.txt +mars pg3200.txt, pg100.txt +mars! pg100.txt +mars" pg3200.txt +mars, pg100.txt +mars. pg100.txt +mars; pg100.txt +mars? pg100.txt +marse pg3200.txt +marseilles pg3200.txt, pg100.txt +marseilles, pg3200.txt +marsh pg3200.txt +marsh, pg3200.txt +marsh. pg3200.txt, pg100.txt +marshal pg3200.txt, pg100.txt +marshal, pg100.txt +marshal; pg100.txt +marshall pg3200.txt +marshall, pg3200.txt +marshalling pg3200.txt +marshes pg3200.txt +marshes; pg3200.txt +mart pg3200.txt, pg100.txt +mart, pg100.txt +mart. pg100.txt +mart; pg100.txt +martext pg100.txt +martext, pg100.txt +martext. pg100.txt +martha pg3200.txt +martial pg3200.txt +martial! pg100.txt +martin pg31100.txt +martin's pg31100.txt +martin, pg31100.txt +martin," pg31100.txt +martin. pg31100.txt, pg3200.txt +martin." pg31100.txt, pg3200.txt +martin; pg31100.txt +martin?" pg31100.txt +martins pg31100.txt +martins, pg31100.txt +martins. pg31100.txt +martius pg100.txt +martius, pg100.txt +martlemas, pg100.txt +martlet, pg100.txt +marts pg3200.txt +martyr pg3200.txt, pg100.txt +martyr! pg100.txt +martyr." pg3200.txt +martyrdom pg3200.txt +martyrdom!" pg3200.txt +martyrdom, pg3200.txt +martyrs pg31100.txt, pg3200.txt +martyrs. pg3200.txt +marvel pg3200.txt, pg100.txt +marvel, pg3200.txt, pg100.txt +marvel--as pg3200.txt +marvel. pg3200.txt +marvel." pg3200.txt +marvel; pg100.txt +marveled pg3200.txt +marveling pg3200.txt +marvelling; pg3200.txt +marvellous pg3200.txt +marvellous!" pg3200.txt +marvellously pg3200.txt +marvelous pg3200.txt +marvelous!" pg3200.txt +marvelous, pg3200.txt +marvelously pg3200.txt +marvels pg3200.txt +marvels! pg3200.txt +marvels, pg3200.txt +marvels. pg3200.txt +marvels." pg3200.txt +marvels; pg3200.txt +marwarie pg3200.txt +mary pg31100.txt, pg3200.txt +mary!--" pg3200.txt +mary's pg31100.txt, pg3200.txt +mary, pg31100.txt, pg3200.txt +mary-- pg31100.txt +mary--!" pg3200.txt +mary--glad pg3200.txt +mary--if pg3200.txt +mary. pg31100.txt, pg3200.txt +mary." pg31100.txt, pg3200.txt +mary:-- pg31100.txt +mary; pg3200.txt +mary? pg100.txt +mary?" pg3200.txt +mary?--what pg3200.txt +maryborough" pg3200.txt +maryborough. pg3200.txt +mas pg100.txt +masculine, pg3200.txt +masculine. pg3200.txt +mash pg3200.txt +mash." pg3200.txt +masham, pg100.txt +masham. pg100.txt +mashed pg3200.txt +mask pg31100.txt, pg3200.txt, pg100.txt +mask! pg100.txt +mask"--that pg3200.txt +mask'd. pg100.txt +mask'd; pg100.txt +mask, pg100.txt +mask--looks pg3200.txt +mask-ball, pg3200.txt +mask. pg100.txt +mask." pg3200.txt +mask? pg100.txt +mask] pg100.txt +masked pg3200.txt +maskers, pg100.txt +maskers. pg100.txt +maskers] pg100.txt +masks pg100.txt +masks]. 
pg100.txt +mason pg3200.txt +mason's pg3200.txt +mason, pg100.txt +mason. pg3200.txt +masonry pg3200.txt +masonry, pg3200.txt, pg100.txt +masonry. pg3200.txt +masque pg100.txt +masque; pg100.txt +masquerade pg3200.txt +masquerading pg3200.txt +masquerading. pg3200.txt +masquers pg100.txt +mass pg3200.txt, pg100.txt +mass!" pg3200.txt +mass, pg3200.txt, pg100.txt +mass-meeting pg3200.txt +mass-meetings pg3200.txt +mass. pg3200.txt +mass.: pg3200.txt +massachusetts pg3200.txt +massachusetts, pg3200.txt +massachusetts--vulgar pg3200.txt +massachusetts.", pg3200.txt +massachusetts: pg3200.txt +massacre pg3200.txt, pg100.txt +massacre! pg100.txt +massacre" pg3200.txt +massacre, pg3200.txt, pg100.txt +massacre--enterprise. pg3200.txt +massacre. pg3200.txt, pg100.txt +massacred pg3200.txt +massacres! pg100.txt +massacres, pg100.txt +massed pg3200.txt +massed, pg3200.txt +masses pg3200.txt +masses, pg3200.txt +massive pg3200.txt +massy pg3200.txt +mast pg3200.txt, pg100.txt +mast, pg3200.txt, pg100.txt +mast? pg100.txt +mastcr's pg100.txt +master pg31100.txt, pg3200.txt, pg100.txt +master! pg3200.txt, pg100.txt +master's pg31100.txt, pg3200.txt, pg100.txt +master's, pg100.txt +master's. pg100.txt +master, pg31100.txt, pg3200.txt, pg100.txt +master- pg100.txt +master--" pg3200.txt +master--cauchon. pg3200.txt +master-gunner pg100.txt +master-key pg3200.txt +master-stroke. pg3200.txt +master. pg31100.txt, pg3200.txt, pg100.txt +master." pg31100.txt, pg3200.txt +master; pg31100.txt, pg100.txt +master? pg100.txt +master?" pg3200.txt +masterdom. pg100.txt +mastered pg3200.txt +masterly pg3200.txt +masterly. pg100.txt +masterpiece pg3200.txt +masterpiece! pg3200.txt +masterpiece, pg3200.txt +masterpiece. pg100.txt +masterpieces. pg3200.txt +masters pg3200.txt, pg100.txt +masters! pg100.txt +masters' pg3200.txt +masters'? pg100.txt +masters, pg31100.txt, pg3200.txt, pg100.txt +masters-- pg100.txt +masters. pg31100.txt, pg3200.txt, pg100.txt +masters." pg31100.txt, pg3200.txt +masters? pg3200.txt, pg100.txt +masters] pg3200.txt +mastership. pg100.txt +mastership? pg100.txt +mastery pg3200.txt +masthead pg3200.txt +mastiff, pg3200.txt +mastiffs pg100.txt +mastodon pg3200.txt +mastodon, pg3200.txt +mastodon. pg3200.txt +masts pg3200.txt +mat, pg3200.txt +matabeleland pg3200.txt +match pg31100.txt, pg3200.txt, pg100.txt +match! pg31100.txt, pg100.txt +match!" pg31100.txt, pg3200.txt +match'd, pg100.txt +match, pg31100.txt, pg3200.txt, pg100.txt +match-making, pg31100.txt +match-making. pg31100.txt +match-making." pg31100.txt +match. pg31100.txt, pg3200.txt, pg100.txt +match." pg31100.txt, pg3200.txt +match.'" pg31100.txt +match; pg31100.txt, pg100.txt +match? pg100.txt +match?" pg3200.txt +match?' pg3200.txt +matched pg31100.txt, pg3200.txt +matches pg3200.txt, pg100.txt +matches!" pg3200.txt +matches, pg3200.txt, pg100.txt +matches. pg3200.txt +matches." pg31100.txt +matchless. pg3200.txt +matchlock pg3200.txt +mate pg3200.txt +mate's pg3200.txt +mate, pg3200.txt, pg100.txt +mate. pg3200.txt, pg100.txt +mate." pg3200.txt +mate:-- pg3200.txt +mate?" pg3200.txt +mated, pg3200.txt +mated. pg100.txt +mater, pg3200.txt, pg100.txt +mater. pg3200.txt, pg100.txt +material pg31100.txt, pg3200.txt, pg100.txt +material, pg3200.txt +material. pg3200.txt, pg100.txt +material.--mr. pg31100.txt +material; pg31100.txt +material? pg3200.txt +materialization pg3200.txt +materialize, pg3200.txt +materialize. pg3200.txt +materializee." pg3200.txt +materializee; pg3200.txt +materializing pg3200.txt +materializing. 
pg3200.txt +materially pg31100.txt, pg3200.txt +materials pg3200.txt +materials! pg3200.txt +materials, pg3200.txt +materials. pg3200.txt +materials." pg31100.txt +maternal pg31100.txt, pg3200.txt +mates pg3200.txt, pg100.txt +mates! pg100.txt +mates, pg3200.txt +mates. pg3200.txt +mates; pg100.txt +mates? pg100.txt +mathematical pg3200.txt +mathematically pg3200.txt +mathematician pg31100.txt +mathematician. pg3200.txt +mathematicians pg3200.txt +mathematicians. pg3200.txt +mathematics, pg100.txt +mathieu, pg3200.txt +matilda pg31100.txt +matin pg3200.txt +matinee?" pg3200.txt +matinees, pg3200.txt +matparamahansrzpairivrajakacharyaswamibhaskaranandasaraswati. pg3200.txt +matrimonial pg31100.txt +matrimony pg31100.txt +matrimony, pg31100.txt, pg3200.txt +matrimony. pg31100.txt +matrimony." pg31100.txt +matron pg3200.txt +matron: pg100.txt +matrons, pg3200.txt +matter pg31100.txt, pg3200.txt, pg100.txt +matter! pg3200.txt +matter!" pg3200.txt +matter" pg3200.txt +matter, pg31100.txt, pg3200.txt, pg100.txt +matter- pg100.txt +matter-- pg3200.txt +matter--" pg3200.txt +matter--an pg100.txt +matter--but pg3200.txt +matter--he pg3200.txt +matter--i pg3200.txt +matter--may pg3200.txt +matter--science pg3200.txt +matter--within pg3200.txt +matter-of-course pg3200.txt +matter-of-fact pg3200.txt +matter. pg31100.txt, pg3200.txt, pg100.txt +matter." pg31100.txt, pg3200.txt +matter.' pg3200.txt +matter.--ha! pg31100.txt +matter.], pg3200.txt +matter: pg31100.txt, pg3200.txt +matter; pg3200.txt, pg100.txt +matter? pg3200.txt, pg100.txt +matter?" pg31100.txt, pg3200.txt +matter?' pg3200.txt +matterhorn pg3200.txt +matterhorn, pg3200.txt +matterhorn--already pg3200.txt +matterhorn. pg3200.txt +matterhorn; pg3200.txt +matters pg31100.txt, pg3200.txt, pg100.txt +matters! pg3200.txt +matters, pg31100.txt, pg3200.txt, pg100.txt +matters--such pg3200.txt +matters. pg3200.txt, pg100.txt +matters." pg31100.txt +matters; pg31100.txt, pg3200.txt, pg100.txt +matters? pg100.txt +matters?" pg31100.txt, pg3200.txt +matters?' pg3200.txt +matthew. pg3200.txt +matthews pg3200.txt +matthews, pg3200.txt +matthews. pg3200.txt +matting pg3200.txt +mattock, pg100.txt +mattress pg3200.txt +mattress, pg3200.txt +mattress. pg100.txt +mattresses. pg3200.txt +mature pg31100.txt, pg3200.txt, pg100.txt +maturity pg3200.txt +maturity. pg3200.txt +maudlin. pg100.txt +maurice, pg3200.txt +maurice: pg3200.txt +maurier pg3200.txt +mauritian, pg3200.txt +mauritius pg3200.txt +mauritius--quarantines pg3200.txt +mauritius. pg3200.txt +mauritius.) pg3200.txt +mauritius; pg3200.txt +mausoleum pg3200.txt +mausoleum, pg3200.txt +mausoleum--5. pg3200.txt +mausoleum: pg3200.txt +maw, pg100.txt +mawnin' pg3200.txt +mawnin'.' pg3200.txt +maws. pg100.txt +max pg3200.txt +maxillaris pg3200.txt +maxim pg3200.txt +maxim, pg31100.txt, pg3200.txt +maxim: pg3200.txt +maximilianus pg3200.txt +maxims pg3200.txt +maxims. pg3200.txt +maximum pg3200.txt +may! pg100.txt +may!" pg3200.txt +may"; pg100.txt +may, pg31100.txt, pg3200.txt, pg100.txt +may," pg3200.txt +may- pg100.txt +may. pg31100.txt, pg3200.txt, pg100.txt +may." pg31100.txt, pg3200.txt +may: pg100.txt +may; pg100.txt +may? pg3200.txt +maybe pg3200.txt +maybe, pg3200.txt +maybe--" pg3200.txt +maybe-so's, pg3200.txt +maybe. pg3200.txt +maybe." pg3200.txt +maybe.' pg3200.txt +maybe?" pg3200.txt +mayday, pg100.txt +mayhap pg3200.txt +mayhem, pg3200.txt +mayhem. pg3200.txt +mayn't pg3200.txt +mayn't!" 
pg3200.txt +mayor pg3200.txt, pg100.txt +mayor, pg3200.txt, pg100.txt +mayor- pg100.txt +mayor--for pg3200.txt +mayor. pg3200.txt +mayoralty. pg31100.txt +mayst pg100.txt +mayst. pg100.txt +maze pg3200.txt +maze, pg100.txt +mazes pg3200.txt +mazy pg3200.txt +mazzard. pg100.txt +mcaleer. pg3200.txt +mccarter pg3200.txt +mccarthy, pg3200.txt +mcclellan. pg3200.txt +mcclintock pg3200.txt +mcclintock, pg3200.txt +mcclintock; pg3200.txt +mcclure's pg3200.txt +mccomb, pg3200.txt +mccrackan pg3200.txt +mcdowells pg3200.txt +mcelroy pg3200.txt +mcfadden. pg3200.txt +mcfarland pg3200.txt +mcfarland, pg3200.txt +mcflinn pg3200.txt +mcgee, pg3200.txt +mcgregor. pg3200.txt +mckelway.] pg3200.txt +mckelway], pg3200.txt +mckinley pg3200.txt +mcquiston: pg3200.txt +mcspadden pg3200.txt +mcwilliams pg3200.txt +mcwilliams's, pg3200.txt +mcwilliams. pg3200.txt +me! pg31100.txt, pg3200.txt, pg100.txt +me!" pg31100.txt, pg3200.txt +me!' pg31100.txt, pg3200.txt +me!), pg100.txt +me!- pg100.txt +me!--don't pg3200.txt +me!--they pg31100.txt +me" pg31100.txt +me"--such pg3200.txt +me"; pg3200.txt +me'! pg100.txt +me'- pg100.txt +me'? pg100.txt +me) pg3200.txt +me), pg3200.txt +me, pg31100.txt, pg3200.txt, pg100.txt +me," pg31100.txt, pg3200.txt +me,' pg3200.txt +me,) pg3200.txt +me,--and pg3200.txt +me,--it pg31100.txt +me- pg100.txt +me-- pg3200.txt +me--" pg3200.txt +me--(do pg31100.txt +me--. pg31100.txt +me--a pg3200.txt +me--all pg3200.txt +me--and pg3200.txt +me--and--and--oh, pg3200.txt +me--but pg31100.txt, pg3200.txt +me--by pg100.txt +me--calling pg3200.txt +me--clerk pg3200.txt +me--did pg3200.txt +me--do pg31100.txt +me--empties pg3200.txt +me--here, pg3200.txt +me--home! pg3200.txt +me--how pg3200.txt +me--i pg31100.txt, pg3200.txt +me--it pg31100.txt +me--jane pg31100.txt +me--not pg3200.txt +me--now pg3200.txt +me--odious! pg3200.txt +me--please." pg3200.txt +me--porter pg3200.txt +me--said: pg3200.txt +me--salting pg3200.txt +me--she pg3200.txt +me--such pg31100.txt, pg3200.txt +me--the pg3200.txt +me--there pg3200.txt +me--they pg31100.txt +me--uncle pg3200.txt +me--unless pg3200.txt +me--which pg3200.txt +me--will pg3200.txt +me--you pg3200.txt +me-e-e-yow-ow-ow-ow--fzt!--wow!" pg3200.txt +me. pg31100.txt, pg3200.txt, pg100.txt +me." pg31100.txt, pg3200.txt +me.' pg3200.txt, pg100.txt +me.'" pg31100.txt +me.- pg100.txt +me.--'oh,' pg31100.txt +me.--i pg31100.txt +me.... pg3200.txt +me: pg31100.txt, pg3200.txt, pg100.txt +me; pg31100.txt, pg3200.txt, pg100.txt +me;' pg3200.txt +me? pg31100.txt, pg3200.txt, pg100.txt +me?" pg31100.txt, pg3200.txt +me?' pg3200.txt +me?'" pg31100.txt +me?--i pg3200.txt +me?--in pg31100.txt +me?--not pg31100.txt +me] pg3200.txt +mead, pg100.txt +mead. pg100.txt +meadow pg3200.txt +meadow, pg31100.txt, pg3200.txt +meadow-expanses pg3200.txt +meadows pg31100.txt, pg3200.txt +meadows; pg31100.txt +meads, pg100.txt +meal pg3200.txt +meal" pg3200.txt +meal'd pg100.txt +meal, pg31100.txt, pg3200.txt +meal. pg31100.txt, pg3200.txt, pg100.txt +meal." pg3200.txt +meal; pg3200.txt +meal?" pg3200.txt +meal?' pg3200.txt +meals pg3200.txt +meals, pg3200.txt +meals. pg3200.txt, pg100.txt +meals." pg3200.txt +mean pg31100.txt, pg3200.txt, pg100.txt +mean!" pg3200.txt +mean!' pg3200.txt +mean'?" pg31100.txt +mean'st pg100.txt +mean'st? pg100.txt +mean, pg31100.txt, pg3200.txt, pg100.txt +mean- pg100.txt +mean-- pg3200.txt +mean--" pg3200.txt +mean--' pg3200.txt +mean--and pg3200.txt +mean--i pg31100.txt +mean--went, pg3200.txt +mean--when pg3200.txt +mean-apparell'd. 
pg100.txt +mean-born pg100.txt +mean-spirited pg31100.txt +mean. pg31100.txt, pg3200.txt, pg100.txt +mean." pg31100.txt, pg3200.txt +mean.--where pg31100.txt +mean: pg3200.txt, pg100.txt +mean; pg31100.txt, pg3200.txt, pg100.txt +mean? pg31100.txt, pg3200.txt, pg100.txt +mean?" pg31100.txt, pg3200.txt +mean?' pg3200.txt +mean?--good pg31100.txt +meandered pg3200.txt +meandering pg3200.txt +meaner pg3200.txt +meanest pg3200.txt, pg100.txt +meaneth, pg3200.txt +meaning pg31100.txt, pg3200.txt, pg100.txt +meaning, pg31100.txt, pg3200.txt, pg100.txt +meaning. pg31100.txt, pg3200.txt, pg100.txt +meaning." pg31100.txt, pg3200.txt +meaning; pg31100.txt, pg3200.txt +meaning? pg100.txt +meaning?--you pg3200.txt +meaningless pg3200.txt +meaningless, pg3200.txt +meaningless? pg3200.txt +meaninglessly pg3200.txt +meanings pg3200.txt +meanings. pg3200.txt +meanings? pg100.txt +meanly pg3200.txt, pg100.txt +meanness, pg31100.txt +meanness. pg3200.txt +meannesses, pg3200.txt +means pg31100.txt, pg3200.txt, pg100.txt +means! pg100.txt +means!" pg3200.txt +means) pg31100.txt +means, pg31100.txt, pg3200.txt, pg100.txt +means," pg3200.txt +means--" pg31100.txt +means--by pg3200.txt +means--government pg3200.txt +means. pg31100.txt, pg3200.txt, pg100.txt +means." pg31100.txt, pg3200.txt +means.--m.t. pg3200.txt +means: pg3200.txt +means; pg31100.txt, pg100.txt +means? pg31100.txt, pg100.txt +means?" pg3200.txt +meant pg31100.txt, pg3200.txt, pg100.txt +meant'st, pg100.txt +meant, pg31100.txt, pg3200.txt, pg100.txt +meant--eve pg3200.txt +meant. pg3200.txt +meant." pg31100.txt +meant: pg3200.txt +meant; pg31100.txt, pg3200.txt, pg100.txt +meant? pg3200.txt, pg100.txt +meantime pg3200.txt, pg100.txt +meantime, pg3200.txt, pg100.txt +meantime." pg3200.txt +meantime; pg3200.txt +meanwhile pg31100.txt +meanwhile, pg31100.txt, pg3200.txt +meanwhile. pg3200.txt +measles pg3200.txt, pg100.txt +measles, pg3200.txt +measles. pg3200.txt +measles; pg3200.txt +measurably pg3200.txt +measure pg31100.txt, pg3200.txt, pg100.txt +measure! pg31100.txt +measure!" pg31100.txt +measure"; pg3200.txt +measure, pg31100.txt, pg3200.txt, pg100.txt +measure. pg3200.txt, pg100.txt +measure." pg31100.txt, pg3200.txt +measure;--and pg31100.txt +measure?" pg3200.txt +measured pg31100.txt, pg3200.txt +measured. pg3200.txt +measureless pg3200.txt +measureless. pg3200.txt +measurement pg3200.txt +measurement. pg3200.txt +measurement.]) pg3200.txt +measurements, pg31100.txt +measurements. pg3200.txt +measures pg31100.txt, pg3200.txt +measures, pg31100.txt +measures--in pg31100.txt +measures. pg31100.txt, pg100.txt +measures." pg31100.txt +measures; pg31100.txt +measuring pg3200.txt +meat pg31100.txt, pg3200.txt, pg100.txt +meat, pg3200.txt, pg100.txt +meat. pg3200.txt, pg100.txt +meat." pg3200.txt +meat.' pg3200.txt +meat; pg31100.txt, pg3200.txt +meat? pg100.txt +meats pg100.txt +mecca pg3200.txt +mecca,) pg3200.txt +mecca. pg3200.txt +mechanic pg3200.txt +mechanic, pg3200.txt +mechanic?" pg3200.txt +mechanical, pg100.txt +mechanically pg3200.txt +mechanically, pg3200.txt +mechanically. pg3200.txt +mechanicals, pg100.txt +mechanics' pg3200.txt +mechanics. pg3200.txt +mechanics: pg3200.txt +mechanism. pg3200.txt +med'cinable pg100.txt +med'cine, pg100.txt +medal pg3200.txt +medals pg3200.txt +meddle pg3200.txt, pg100.txt +meddle. 
pg3200.txt, pg100.txt +meddled pg3200.txt +meddler pg100.txt +meddler, pg3200.txt, pg100.txt +meddlest pg3200.txt +meddling pg3200.txt +meddling'--and pg3200.txt +media, pg100.txt +medical pg31100.txt, pg3200.txt +medicinal. pg3200.txt +medicine pg3200.txt, pg100.txt +medicine! pg3200.txt +medicine, pg3200.txt +medicine-chest, pg3200.txt +medicine. pg100.txt +medicine." pg3200.txt +medicine.' pg3200.txt +medicine?" pg3200.txt +medicines pg31100.txt, pg3200.txt +medicines!" pg3200.txt +medicines, pg3200.txt +medicis pg3200.txt +medieval pg3200.txt +mediocrity, pg3200.txt +mediocrity,) pg3200.txt +meditate pg31100.txt +meditated. pg3200.txt +meditates. pg100.txt +meditating pg3200.txt, pg100.txt +meditating, pg31100.txt +meditating. pg3200.txt +meditation pg3200.txt +meditation. pg31100.txt, pg3200.txt, pg100.txt +meditation; pg100.txt +meditations pg31100.txt, pg100.txt +meditations, pg31100.txt +meditations? pg100.txt +meditative pg3200.txt +meditative, pg3200.txt +meditatively pg3200.txt +meditatively, pg3200.txt +mediterranean pg3200.txt +mediterranean! pg3200.txt +mediterranean." pg31100.txt +mediterranean; pg3200.txt +medium pg31100.txt, pg3200.txt, pg100.txt +medium, pg31100.txt, pg3200.txt, pg100.txt +medium. pg3200.txt +medlar. pg100.txt +medlar." pg3200.txt +medlar? pg100.txt +meed pg100.txt +meed! pg100.txt +meed, pg100.txt +meed. pg100.txt +meeds, pg100.txt +meedyevil pg3200.txt +meek pg3200.txt +meek, pg3200.txt +meekly pg3200.txt +meekly-- pg3200.txt +meekness pg3200.txt +meet pg31100.txt, pg3200.txt, pg100.txt +meet! pg31100.txt +meet!" pg31100.txt +meet'st pg100.txt +meet, pg31100.txt, pg3200.txt, pg100.txt +meet. pg31100.txt, pg3200.txt, pg100.txt +meet." pg3200.txt +meet: pg100.txt +meet; pg31100.txt, pg100.txt +meet;- pg100.txt +meet? pg100.txt +meeter pg3200.txt +meetest pg100.txt +meeting pg31100.txt, pg3200.txt, pg100.txt +meeting, pg31100.txt, pg3200.txt, pg100.txt +meeting--and pg31100.txt +meeting. pg31100.txt, pg3200.txt, pg100.txt +meeting." pg3200.txt +meeting: pg3200.txt +meeting?" pg3200.txt +meetings pg31100.txt, pg3200.txt +meetings, pg31100.txt, pg3200.txt, pg100.txt +meetings--monthly--for pg3200.txt +meetings. pg31100.txt, pg3200.txt +meetly. pg100.txt +meetness, pg100.txt +meets pg31100.txt, pg3200.txt, pg100.txt +meets, pg100.txt +meg! pg100.txt +meidling pg3200.txt +meidling. pg3200.txt +meienwand. pg3200.txt +mein pg3200.txt +meine pg3200.txt +meine] pg3200.txt +meinem pg3200.txt +meiner pg3200.txt +meiny, pg100.txt +meiringen, pg3200.txt +meisen. pg100.txt +meisterschaft pg3200.txt +meisterschaft! pg3200.txt +meisterschaft, pg3200.txt +melancholy pg31100.txt, pg3200.txt, pg100.txt +melancholy! pg31100.txt, pg100.txt +melancholy, pg31100.txt, pg100.txt +melancholy. pg31100.txt, pg100.txt +melancholy; pg31100.txt, pg3200.txt +melancholy? pg100.txt +melbourne pg3200.txt +melbourne, pg3200.txt +melbourne. pg3200.txt +melbourne." pg3200.txt +melchisedek! pg3200.txt +melee pg3200.txt +mellahah pg3200.txt +mellow pg3200.txt, pg100.txt +mellow, pg3200.txt, pg100.txt +mellow-sounding pg3200.txt +mellville. pg3200.txt +melodeon pg3200.txt +melodeon. pg3200.txt +melodeon; pg3200.txt +melodious pg3200.txt +melodramatic pg3200.txt +melody pg3200.txt, pg100.txt +melody. pg3200.txt, pg100.txt +melody; pg100.txt +melody? pg100.txt +melon pg3200.txt +melon, pg3200.txt +melons pg3200.txt +melons, pg3200.txt +melt pg3200.txt, pg100.txt +melt, pg3200.txt, pg100.txt +melt. 
pg100.txt +melted pg3200.txt, pg100.txt +melted, pg100.txt +melting pg3200.txt +meltings pg3200.txt +melts pg3200.txt, pg100.txt +melts, pg100.txt +melun pg100.txt +melun, pg3200.txt +melun. pg100.txt +member pg31100.txt, pg3200.txt, pg100.txt +member, pg3200.txt +member. pg3200.txt +member." pg3200.txt +member.'" pg3200.txt +member?" pg3200.txt +members pg3200.txt, pg100.txt +members" pg3200.txt +members, pg31100.txt, pg3200.txt +members--now pg3200.txt +members-thirty-two pg3200.txt +members. pg3200.txt, pg100.txt +members." pg3200.txt +members; pg3200.txt +members?" pg3200.txt +membership pg3200.txt +membership--cats pg3200.txt +membership. pg3200.txt, pg100.txt +membership.>> pg100.txt +membership: pg3200.txt +membership; pg3200.txt +meme. pg3200.txt +memento pg31100.txt, pg3200.txt +memento-factory, pg3200.txt +memento-magazine. pg3200.txt +mementoes pg3200.txt +memnonists, pg3200.txt +memoir pg3200.txt +memoirs pg3200.txt +memoirs, pg3200.txt +memoirs. pg3200.txt +memorable pg3200.txt +memorably pg3200.txt +memoranda pg3200.txt +memoranda. pg3200.txt +memorandum pg3200.txt +memorandum-book pg3200.txt +memorandum-book. pg3200.txt +memorandum-book; pg3200.txt +memorandum-books. pg3200.txt +memorial pg31100.txt, pg3200.txt, pg100.txt +memorial, pg3200.txt +memorials pg31100.txt, pg3200.txt +memoriam pg3200.txt +memories pg31100.txt, pg3200.txt, pg100.txt +memories! pg3200.txt +memories. pg3200.txt, pg100.txt +memorising.] pg3200.txt +memoriz'd. pg100.txt +memorize pg3200.txt +memorized pg3200.txt +memorizing pg3200.txt +memory pg31100.txt, pg3200.txt, pg100.txt +memory! pg31100.txt, pg100.txt +memory, pg31100.txt, pg3200.txt, pg100.txt +memory--and pg3200.txt +memory--which pg3200.txt +memory-exhibition. pg3200.txt +memory-tablet pg3200.txt +memory. pg31100.txt, pg3200.txt, pg100.txt +memory." pg31100.txt, pg3200.txt +memory: pg100.txt +memory; pg31100.txt, pg3200.txt +memphis pg3200.txt +memphis, pg3200.txt +memphis. pg3200.txt +men pg31100.txt, pg3200.txt, pg100.txt +men! pg31100.txt, pg3200.txt, pg100.txt +men!" pg3200.txt +men!' pg3200.txt +men'; pg100.txt +men's pg3200.txt, pg100.txt +men's! pg100.txt +men's; pg100.txt +men), pg100.txt +men, pg31100.txt, pg3200.txt, pg100.txt +men- pg100.txt +men-" pg100.txt +men-- pg100.txt +men--" pg3200.txt +men--but pg3200.txt +men-at-arms pg3200.txt +men-at-arms, pg3200.txt +men-at-arms--few pg3200.txt +men-at-arms. pg3200.txt, pg100.txt +men-at-arms." pg3200.txt +men-at-arms; pg3200.txt +men-at-arms?" pg3200.txt +men-of-war pg3200.txt +men. pg31100.txt, pg3200.txt, pg100.txt +men." pg31100.txt, pg3200.txt +men: pg31100.txt, pg3200.txt, pg100.txt +men; pg3200.txt, pg100.txt +men? pg31100.txt, pg100.txt +men?" pg3200.txt +menac'd pg100.txt +menace pg3200.txt +menacingly: pg3200.txt +menagerie pg3200.txt +menagerie, pg3200.txt +menagerie. pg3200.txt +menas pg100.txt +menas! pg100.txt +menas, pg100.txt +menas. pg100.txt +mend pg31100.txt, pg3200.txt, pg100.txt +mend! pg100.txt +mend, pg31100.txt, pg100.txt +mend. pg100.txt +mend: pg100.txt +mend; pg100.txt +mend? pg100.txt +mendacity! pg3200.txt +mended pg31100.txt, pg3200.txt, pg100.txt +mended, pg100.txt +mended. pg3200.txt, pg100.txt +mended; pg3200.txt +mended? pg100.txt +mendicancy pg3200.txt +mendicancy. pg3200.txt +mendicant pg3200.txt +mendicant); pg3200.txt +mendicants, pg3200.txt +mending pg3200.txt +mending; pg31100.txt +mends; pg100.txt +menelaus pg100.txt +menelaus, pg100.txt +menelaus. pg100.txt +menelaus; pg100.txt +menelaus? 
pg100.txt +menenius pg100.txt +menenius, pg100.txt +menenius. pg100.txt +menenius? pg100.txt +mengette. pg3200.txt +menial pg3200.txt +mennonites pg3200.txt +mental pg31100.txt, pg3200.txt +mental. pg3200.txt +mentality pg3200.txt +mentally. pg3200.txt +menteith. pg100.txt +mention pg31100.txt, pg3200.txt, pg100.txt +mention'd, pg100.txt +mention, pg3200.txt +mention. pg3200.txt +mention." pg31100.txt, pg3200.txt +mention; pg31100.txt +mentioned pg31100.txt, pg3200.txt +mentioned, pg31100.txt, pg3200.txt +mentioned--make pg3200.txt +mentioned--or pg3200.txt +mentioned--the pg3200.txt +mentioned. pg31100.txt, pg3200.txt +mentioned; pg3200.txt +mentioned? pg31100.txt +mentioned?" pg31100.txt, pg3200.txt +mentioning pg31100.txt, pg3200.txt +mentioning, pg3200.txt +mentioning--she pg31100.txt +mentioning. pg3200.txt +mentions pg3200.txt +mentions. pg3200.txt +mentis pg3200.txt +menton? pg100.txt +mentone, pg3200.txt +menu pg3200.txt +menu. pg3200.txt +meow?" pg3200.txt +mephostophilus! pg100.txt +mercantile pg3200.txt +mercatio? pg100.txt +mercedes pg3200.txt +mercedes. pg3200.txt +mercenaries; pg100.txt +mercenary. pg100.txt +mercenary." pg31100.txt +mercer pg100.txt +merchandise pg3200.txt +merchandise. pg100.txt +merchant pg31100.txt, pg3200.txt, pg100.txt +merchant, pg3200.txt, pg100.txt +merchant. pg100.txt +merchant." pg31100.txt +merchant; pg100.txt +merchant? pg100.txt +merchants pg3200.txt +merchants' pg3200.txt +merchants, pg3200.txt, pg100.txt +merchants. pg100.txt +merchants? pg100.txt +mercies pg100.txt +merciful pg3200.txt +merciful! pg100.txt +merciful, pg100.txt +merciful. pg3200.txt, pg100.txt +merciful; pg100.txt +mercifully pg3200.txt +mercifully; pg100.txt +merciless; pg100.txt +mercuries. pg100.txt +mercury pg100.txt +mercury'; pg100.txt +mercury, pg100.txt +mercury. pg3200.txt, pg100.txt +mercury.' pg100.txt +mercutio! pg100.txt +mercutio. pg100.txt +mercutio? pg100.txt +mercy pg31100.txt, pg3200.txt, pg100.txt +mercy! pg100.txt +mercy!--do pg3200.txt +mercy, pg31100.txt, pg3200.txt, pg100.txt +mercy- pg100.txt +mercy. pg3200.txt, pg100.txt +mercy.) pg3200.txt +mercy; pg100.txt +mercy? pg100.txt +mere pg31100.txt, pg3200.txt, pg100.txt +merely pg31100.txt, pg3200.txt, pg100.txt +merely, pg3200.txt +merely. pg100.txt +merely." pg3200.txt +merest pg3200.txt +merged pg3200.txt +meridian pg3200.txt +meridian--the pg3200.txt +merit pg31100.txt, pg3200.txt, pg100.txt +merit, pg31100.txt, pg3200.txt, pg100.txt +merit. pg31100.txt, pg3200.txt, pg100.txt +merit." pg31100.txt, pg3200.txt +merit; pg100.txt +merit? pg100.txt +merited pg3200.txt, pg100.txt +meritorious, pg100.txt +merits pg31100.txt, pg3200.txt, pg100.txt +merits, pg31100.txt, pg3200.txt +merits. pg31100.txt, pg3200.txt, pg100.txt +merits." pg3200.txt +merits: pg3200.txt +merits? pg3200.txt +merlin pg3200.txt +merlin!" pg3200.txt +merlin, pg3200.txt +merlin." pg3200.txt +merrier. pg100.txt +merriest pg3200.txt, pg100.txt +merrily pg3200.txt, pg100.txt +merrily! pg100.txt +merrily, pg100.txt +merrily. pg100.txt +merrily; pg100.txt +merrily? pg100.txt +merriment pg3200.txt, pg100.txt +merriment, pg3200.txt, pg100.txt +merriment. pg31100.txt, pg3200.txt, pg100.txt +merriments; pg100.txt +merriness. pg100.txt +merritt pg3200.txt +merry pg31100.txt, pg3200.txt, pg100.txt +merry! pg100.txt +merry, pg31100.txt, pg100.txt +merry. pg100.txt +merry." pg31100.txt +merry: pg100.txt +merry; pg100.txt +merry? pg100.txt +merwin. pg3200.txt +merwin: pg3200.txt +meryton pg31100.txt +meryton!" 
pg31100.txt +meryton, pg31100.txt +meryton. pg31100.txt +meryton." pg31100.txt +meryton; pg31100.txt +meryton? pg31100.txt +meryton?" pg31100.txt +mesalliances. pg3200.txt +meseemeth--" pg3200.txt +mesh pg3200.txt +meshes pg3200.txt, pg100.txt +mesmerism. pg3200.txt +mesmerism; pg3200.txt +mesmerist pg3200.txt +mesmerist, pg3200.txt +mesmerizer-button, pg3200.txt +mesopotamia pg3200.txt +mess pg3200.txt +mess- pg100.txt +mess-room pg3200.txt +mess. pg31100.txt, pg3200.txt +mess; pg3200.txt, pg100.txt +message pg31100.txt, pg3200.txt, pg100.txt +message! pg31100.txt +message, pg31100.txt, pg3200.txt +message. pg31100.txt, pg3200.txt, pg100.txt +message; pg31100.txt, pg3200.txt, pg100.txt +message? pg3200.txt, pg100.txt +message?" pg31100.txt +messages pg3200.txt +messages. pg100.txt +messages; pg31100.txt, pg3200.txt +messages?" pg3200.txt +messala pg100.txt +messala! pg100.txt +messala, pg100.txt +messala. pg100.txt +messala: pg100.txt +messala; pg100.txt +messalina, pg3200.txt +messenger pg3200.txt, pg100.txt +messenger, pg31100.txt, pg100.txt +messenger- pg100.txt +messenger. pg3200.txt, pg100.txt +messenger." pg31100.txt, pg3200.txt +messenger.] pg100.txt +messenger; pg3200.txt +messenger? pg100.txt +messengers pg100.txt +messengers! pg100.txt +messengers, pg100.txt +messengers. pg100.txt +messes pg100.txt +messiah pg3200.txt +messina pg100.txt +messina, pg3200.txt +messina. pg100.txt +mesurer. pg3200.txt +met pg31100.txt, pg3200.txt, pg100.txt +met! pg31100.txt, pg100.txt +met, pg31100.txt, pg3200.txt, pg100.txt +met. pg3200.txt, pg100.txt +met." pg31100.txt +met; pg100.txt +met? pg31100.txt, pg3200.txt, pg100.txt +met?" pg31100.txt +metal pg3200.txt +metal, pg3200.txt, pg100.txt +metal. pg3200.txt +metal?" pg3200.txt +metallurgy, pg3200.txt +metals pg3200.txt +metals, pg31100.txt +metamorphoses; pg100.txt +metamorphosis." pg3200.txt +metaphor pg3200.txt +metaphor, pg3200.txt +metaphor. pg100.txt +metaphor." pg3200.txt +metaphor? pg100.txt +metaphor?" pg3200.txt +metaphors pg3200.txt +metaphysical pg3200.txt +metaphysics pg3200.txt +metaphysics, pg100.txt +metaphysics. pg3200.txt +metcalf's pg31100.txt +mete pg3200.txt +metellus, pg100.txt +metellus; pg100.txt +meteor, pg100.txt +meteor. pg3200.txt +meteoric pg3200.txt +meteors. pg100.txt +meter. pg3200.txt +methinks pg100.txt +methinks, pg100.txt +methinks. pg3200.txt, pg100.txt +methinks; pg100.txt +method pg31100.txt, pg3200.txt, pg100.txt +method, pg31100.txt, pg3200.txt +method- pg100.txt +method--death pg3200.txt +method." pg3200.txt +method? pg3200.txt +methodical pg3200.txt +methodis', pg3200.txt +methodist pg3200.txt +methodist, pg3200.txt +methodists, pg3200.txt +methodless pg3200.txt +methods pg3200.txt, pg100.txt +methods. pg3200.txt +methods: pg3200.txt +methods; pg3200.txt +methods? pg3200.txt +methought pg100.txt +methought, pg100.txt +methought. pg100.txt +methuselah. pg3200.txt +metre? pg100.txt +metropole, pg3200.txt +metropolis pg3200.txt +metropolis!" pg3200.txt +metropolis, pg3200.txt +metropolis. pg3200.txt +metropolitan pg3200.txt +mettez-la pg3200.txt +mettle pg100.txt +mettle, pg100.txt +mettle. pg100.txt +mettle; pg100.txt +mettle? pg100.txt +metz pg3200.txt +metz, pg3200.txt +meung pg3200.txt +meung. pg3200.txt +meus' pg100.txt +mew pg100.txt +mew! pg100.txt +mew'd pg100.txt +mew'd, pg100.txt +mew'd. pg100.txt +mexican pg3200.txt +mexicans pg3200.txt +mexicans, pg3200.txt +mexico pg3200.txt +mexico! pg3200.txt +mexico, pg3200.txt +mexico. 
pg3200.txt +mexique, pg3200.txt +mezzo pg3200.txt +mezzotints; pg3200.txt +mi. pg100.txt +miasmatic pg3200.txt +mice pg3200.txt +mice. pg100.txt +michael pg31100.txt, pg3200.txt, pg100.txt +michael, pg100.txt +michael. pg100.txt +michaelmas." pg31100.txt +michaelmas? pg100.txt +michigan," pg3200.txt +michigan. pg3200.txt +micks pg3200.txt +microbe" pg3200.txt +microbes pg3200.txt +microscope pg3200.txt +microscope. pg3200.txt +microscopic pg3200.txt +mid-afternoon pg3200.txt +mid-afternoon, pg3200.txt +mid-movement pg3200.txt +mid-night pg3200.txt +mid-ocean. pg3200.txt +mid-stream pg3200.txt +mid-summer, pg3200.txt +mid-way pg100.txt +mid-winter." pg3200.txt +midday pg3200.txt +middle pg31100.txt, pg3200.txt, pg100.txt +middle, pg3200.txt +middle-age pg3200.txt +middle-aged pg3200.txt +middle-sized pg3200.txt +middle. pg3200.txt, pg100.txt +middle; pg100.txt +middle?' pg3200.txt +middleman, pg3200.txt +middlesex, pg31100.txt +middleton pg31100.txt +middleton's pg31100.txt +middleton, pg31100.txt +middleton. pg31100.txt +middleton." pg31100.txt +middletons' pg31100.txt +middletons, pg31100.txt +middletons. pg31100.txt +middling pg3200.txt +midheaven pg3200.txt +midi pg3200.txt +midnight pg3200.txt, pg100.txt +midnight, pg3200.txt, pg100.txt +midnight--ten pg3200.txt +midnight. pg3200.txt, pg100.txt +midnight." pg31100.txt, pg3200.txt +midnight.] pg3200.txt +midnight; pg3200.txt +midnight? pg3200.txt, pg100.txt +midnight?" pg3200.txt +midnight?--and pg3200.txt +midriff. pg100.txt +midst pg31100.txt, pg3200.txt +midst. pg3200.txt, pg100.txt +midst; pg3200.txt, pg100.txt +midsummer pg3200.txt +midsummer," pg31100.txt +midsummer." pg3200.txt +midsummer; pg100.txt +midway pg3200.txt, pg100.txt +midwife. pg100.txt +midwinter; pg3200.txt +mien, pg31100.txt, pg3200.txt +miggles pg3200.txt +might pg31100.txt, pg3200.txt, pg100.txt +might! pg100.txt +might, pg31100.txt, pg3200.txt, pg100.txt +might,' pg100.txt +might- pg100.txt +might-- pg3200.txt +might--" pg3200.txt +might--almost pg3200.txt +might-have-been. pg3200.txt +might-have-beens, pg3200.txt +might-have-beens. pg3200.txt +might. pg31100.txt, pg3200.txt, pg100.txt +might." pg31100.txt, pg3200.txt +might: pg100.txt +might; pg3200.txt, pg100.txt +might? pg100.txt +mightier pg3200.txt +mightiest pg3200.txt +mightily pg3200.txt, pg100.txt +mightily, pg3200.txt +mightily. pg100.txt +mightiness pg3200.txt +mightiness; pg100.txt +mightn't pg3200.txt +mightst pg100.txt +mighty pg3200.txt, pg100.txt +mighty, pg100.txt +mighty. pg100.txt +mighty; pg3200.txt, pg100.txt +migrating. pg3200.txt +mike pg3200.txt +mike, pg3200.txt +mike." pg3200.txt +mil--" pg3200.txt +milan pg3200.txt, pg100.txt +milan! pg100.txt +milan!- pg100.txt +milan, pg100.txt +milan. pg3200.txt, pg100.txt +milan." pg3200.txt +mild pg31100.txt, pg3200.txt +mild, pg31100.txt, pg100.txt +mild. pg100.txt +mild." pg3200.txt +milder? pg100.txt +mildest pg31100.txt +mildly pg3200.txt +mildly, pg100.txt +mildly. pg100.txt +mildly: pg3200.txt +mildness. pg100.txt +mildnesses pg3200.txt +mile pg31100.txt, pg3200.txt, pg100.txt +mile!" pg3200.txt +mile, pg31100.txt, pg3200.txt, pg100.txt +mile--and pg3200.txt +mile-posted pg3200.txt +mile-stoned pg3200.txt +mile. pg3200.txt, pg100.txt +mile." pg31100.txt, pg3200.txt +mile.' pg3200.txt +mile? pg100.txt +miles pg31100.txt, pg3200.txt, pg100.txt +miles! pg3200.txt +miles!-- pg3200.txt +miles' pg3200.txt +miles). 
pg3200.txt +miles); pg3200.txt +miles, pg31100.txt, pg3200.txt, pg100.txt +miles---and pg3200.txt +miles--for pg3200.txt +miles--it pg3200.txt +miles--mainly pg3200.txt +miles-up-hill. pg3200.txt +miles. pg31100.txt, pg3200.txt +miles." pg31100.txt, pg3200.txt +miles; pg3200.txt +miles?" pg3200.txt +milford pg100.txt +milford, pg100.txt +milford; pg100.txt +militant?" pg3200.txt +military pg31100.txt, pg3200.txt, pg100.txt +militia pg31100.txt +militia. pg31100.txt +milk pg3200.txt, pg100.txt +milk! pg100.txt +milk, pg3200.txt, pg100.txt +milk-teeth pg3200.txt +milk-white, pg100.txt +milk. pg3200.txt +milk.' pg100.txt +milk; pg100.txt +milked pg3200.txt +milkers pg3200.txt +milkings pg3200.txt +milks pg100.txt +milksops! pg100.txt +milksops, pg3200.txt +mill pg3200.txt, pg100.txt +mill, pg3200.txt +mill-race, pg3200.txt +mill-race.' pg3200.txt +mill. pg3200.txt, pg100.txt +millais, pg3200.txt +miller pg3200.txt +miller's pg3200.txt +miller, pg3200.txt +miller-gun pg3200.txt +miller-guns pg3200.txt +miller. pg3200.txt +miller?" pg3200.txt +millerite pg3200.txt +millerites pg3200.txt +millerites, pg3200.txt +millet pg3200.txt +millet!" pg3200.txt +millet's pg3200.txt +millet, pg3200.txt +millet--' pg3200.txt +millet.' pg3200.txt +millet?' pg3200.txt +millie pg3200.txt +milliner pg100.txt +milliner, pg100.txt +millinery pg3200.txt +milling pg3200.txt +milling. pg3200.txt +million pg31100.txt, pg3200.txt +million! pg100.txt +million!" pg3200.txt +million--a pg3200.txt +million--enterprise. pg3200.txt +million--get pg3200.txt +million. pg3200.txt, pg100.txt +million." pg3200.txt +million; pg100.txt +millionaire pg3200.txt +millionaire!" pg3200.txt +millionaire's pg3200.txt +millionaire, pg3200.txt +millionaire. pg3200.txt +millionaire." pg3200.txt +millionaires. pg3200.txt +millions pg3200.txt, pg100.txt +millions! pg3200.txt +millions!" pg3200.txt +millions, pg3200.txt, pg100.txt +millions-- pg3200.txt +millions--use pg3200.txt +millions. pg3200.txt, pg100.txt +millions." pg3200.txt +millions? pg3200.txt +millions?" pg3200.txt +mills pg3200.txt +mills, pg3200.txt, pg100.txt +mills." pg3200.txt +millstone pg3200.txt +millstones. pg100.txt +milrays pg3200.txt +milrays!" pg3200.txt +milrays." pg3200.txt +mils pg3200.txt +milsom pg31100.txt +milton, pg3200.txt +mimic pg3200.txt +mimicked pg3200.txt +mimicry!" pg3200.txt +minarets pg3200.txt +mince-pie pg3200.txt +mince-pies. pg31100.txt +mincing pg3200.txt +mind pg31100.txt, pg3200.txt, pg100.txt +mind! pg31100.txt, pg100.txt +mind!" pg31100.txt +mind!"-- pg31100.txt +mind!--look pg31100.txt +mind" pg3200.txt +mind's pg3200.txt +mind, pg31100.txt, pg3200.txt, pg100.txt +mind- pg100.txt +mind-- pg3200.txt +mind--" pg3200.txt +mind--. pg31100.txt +mind--and pg31100.txt +mind--he pg3200.txt +mind--in pg31100.txt +mind--their pg3200.txt +mind-cure pg3200.txt +mind-cure! pg3200.txt +mind-cure." pg3200.txt +mind-curists, pg3200.txt +mind-healing."' pg3200.txt +mind-healing.'" pg3200.txt +mind-reader?" pg3200.txt +mind-space pg3200.txt +mind-transference; pg3200.txt +mind. pg31100.txt, pg3200.txt, pg100.txt +mind." pg31100.txt, pg3200.txt +mind.' pg3200.txt +mind.'" pg3200.txt +mind: pg3200.txt, pg100.txt +mind; pg31100.txt, pg3200.txt, pg100.txt +mind;--and pg31100.txt +mind? pg3200.txt, pg100.txt +mind?" pg31100.txt, pg3200.txt +mind?' pg3200.txt +minded pg3200.txt, pg100.txt +minded, pg3200.txt, pg100.txt +mindful pg31100.txt, pg3200.txt +minding pg3200.txt +minding. pg31100.txt +minds pg31100.txt, pg3200.txt, pg100.txt +minds! 
pg100.txt +minds, pg3200.txt, pg100.txt +minds- pg100.txt +minds--the pg3200.txt +minds. pg3200.txt, pg100.txt +minds." pg31100.txt +minds; pg31100.txt, pg100.txt +minds?" pg3200.txt +mine pg31100.txt, pg3200.txt, pg100.txt +mine! pg100.txt +mine!" pg3200.txt +mine!' pg3200.txt +mine), pg100.txt +mine): pg3200.txt +mine, pg31100.txt, pg3200.txt, pg100.txt +mine," pg3200.txt +mine,' pg3200.txt +mine- pg100.txt +mine--" pg31100.txt +mine--a pg3200.txt +mine--at pg3200.txt +mine--hey?" pg31100.txt +mine--what pg31100.txt +mine. pg31100.txt, pg3200.txt, pg100.txt +mine." pg31100.txt, pg3200.txt +mine.' pg3200.txt, pg100.txt +mine.) pg3200.txt +mine: pg3200.txt +mine; pg31100.txt, pg3200.txt, pg100.txt +mine? pg3200.txt, pg100.txt +mine?" pg31100.txt, pg3200.txt +miner pg3200.txt +miner, pg3200.txt +miner. pg3200.txt +minerals pg100.txt +miners pg3200.txt +miners)--and pg3200.txt +miners, pg3200.txt +miners; pg3200.txt +minerva, pg100.txt +mines pg3200.txt, pg100.txt +mines, pg3200.txt +mines--holes pg3200.txt +mines--not pg3200.txt +mines--timber pg3200.txt +mines. pg3200.txt +mines; pg3200.txt, pg100.txt +mines? pg100.txt +minghetti, pg3200.txt +mingle pg3200.txt +mingled pg3200.txt, pg100.txt +mingling pg3200.txt +miniature pg31100.txt, pg3200.txt +miniature, pg3200.txt +miniature." pg3200.txt +miniatures, pg31100.txt +minimo.' pg100.txt +minimum pg3200.txt +mining pg3200.txt +mining! pg3200.txt +mining, pg3200.txt +mining--which pg3200.txt +mining-camp, pg3200.txt +mining. pg3200.txt +mining." pg3200.txt +minion! pg100.txt +minion, pg100.txt +minions pg3200.txt +minist'red pg100.txt +minist'red, pg100.txt +minist'red. pg100.txt +minister pg3200.txt, pg100.txt +minister, pg100.txt +minister. pg3200.txt, pg100.txt +minister; pg3200.txt, pg100.txt +ministerial pg3200.txt +ministering pg3200.txt +ministers pg3200.txt, pg100.txt +ministers, pg31100.txt, pg3200.txt, pg100.txt +ministers. pg3200.txt, pg100.txt +ministers? pg3200.txt +ministrations pg3200.txt +ministry pg3200.txt +ministry! pg3200.txt +ministry. pg3200.txt +ministry?" pg3200.txt +minjekahwan, pg3200.txt +minneapolis pg3200.txt +minneapolis, pg3200.txt +minnehaha. pg3200.txt +minola pg100.txt +minola, pg100.txt +minola. pg100.txt +minola? pg100.txt +minor pg31100.txt, pg3200.txt +minor, pg3200.txt +minorca. pg3200.txt +minore pg3200.txt +minority pg3200.txt, pg100.txt +minority, pg100.txt +minority. pg31100.txt +minstrel pg3200.txt +minstrel--draw pg100.txt +minstrel. pg100.txt +minstrels? pg100.txt +minstrelsy, pg100.txt +minstrelsy. pg100.txt +mint, pg3200.txt, pg100.txt +mint. pg100.txt +mint; pg3200.txt +minted. pg3200.txt +minus pg3200.txt +minute pg31100.txt, pg3200.txt, pg100.txt +minute! pg3200.txt +minute!" pg3200.txt +minute's pg31100.txt, pg100.txt +minute, pg31100.txt, pg3200.txt +minute--" pg3200.txt +minute--i pg3200.txt +minute--if pg3200.txt +minute--two pg3200.txt +minute-lacks! pg100.txt +minute-while; pg100.txt +minute. pg31100.txt, pg3200.txt +minute." pg31100.txt, pg3200.txt +minute.' pg3200.txt +minute; pg31100.txt, pg3200.txt, pg100.txt +minute?" pg3200.txt +minutely pg31100.txt, pg3200.txt +minuteness pg3200.txt +minuteness. pg31100.txt +minutes pg31100.txt, pg3200.txt +minutes! pg3200.txt +minutes!" pg3200.txt +minutes!' pg3200.txt +minutes' pg31100.txt +minutes, pg31100.txt, pg3200.txt +minutes--and pg3200.txt +minutes--in pg3200.txt +minutes--they pg3200.txt +minutes. pg31100.txt, pg3200.txt +minutes." pg31100.txt +minutes: pg31100.txt +minutes; pg31100.txt, pg3200.txt +minutes?" 
pg3200.txt +minutiae pg3200.txt +minx! pg100.txt +mir pg3200.txt +mirable, pg100.txt +miracle pg3200.txt, pg100.txt +miracle! pg100.txt +miracle!' pg100.txt +miracle, pg3200.txt, pg100.txt +miracle--for pg3200.txt +miracle--the pg3200.txt +miracle-factory pg3200.txt +miracle. pg3200.txt, pg100.txt +miracle." pg3200.txt +miracle: pg3200.txt +miracle; pg3200.txt +miracles pg3200.txt, pg100.txt +miracles, pg3200.txt +miracles--prodigies pg3200.txt +miracles. pg3200.txt +miraculous pg3200.txt +miraculous!" pg3200.txt +miraculous. pg3200.txt +miraculous; pg3200.txt +miraculously pg3200.txt +miranda pg100.txt +miranda! pg100.txt +miranda, pg100.txt +miranda. pg100.txt +miranda] pg100.txt +mire pg3200.txt, pg100.txt +mire, pg100.txt +mire. pg100.txt +mirror pg3200.txt, pg100.txt +mirror'd pg100.txt +mirror, pg3200.txt +mirror. pg3200.txt +mirror; pg3200.txt +mirror?" pg3200.txt +mirrored, pg3200.txt +mirrors pg3200.txt +mirth pg3200.txt, pg100.txt +mirth, pg100.txt +mirth,' pg100.txt +mirth. pg100.txt +mirth.' pg100.txt +mirth; pg31100.txt, pg100.txt +mirth? pg100.txt +mis-paging. pg3200.txt +misadventure. pg100.txt +misapplied, pg100.txt +misapply pg31100.txt +misapprehend. pg3200.txt +misbecome pg100.txt +miscalculation pg3200.txt +miscalculation, pg3200.txt +miscalled pg3200.txt +miscarriage pg3200.txt +miscarried pg100.txt +miscarried, pg100.txt +miscarried. pg3200.txt, pg100.txt +miscarried: pg3200.txt +miscarries pg100.txt +miscarry pg100.txt +miscarry! pg100.txt +miscarry, pg3200.txt, pg100.txt +miscarry. pg100.txt +miscarrying, pg100.txt +miscegenation, pg3200.txt +miscellaneous pg3200.txt +miscellany pg3200.txt +mischance pg100.txt +mischance! pg100.txt +mischance, pg100.txt +mischance. pg100.txt +mischance; pg100.txt +mischances pg100.txt +mischief pg31100.txt, pg3200.txt, pg100.txt +mischief, pg3200.txt, pg100.txt +mischief. pg31100.txt, pg3200.txt, pg100.txt +mischief." pg31100.txt, pg3200.txt +mischief: pg100.txt +mischief; pg31100.txt +mischiefs pg100.txt +mischievous, pg100.txt +mischievous. pg3200.txt +mischosen pg3200.txt +misconceive pg3200.txt +misconduct pg31100.txt +misconduct, pg31100.txt +misconduct. pg31100.txt +misconster pg100.txt +misconstruction; pg100.txt +misconstrued, pg31100.txt +miscreant pg3200.txt +miscreant! pg100.txt +miscreant, pg3200.txt, pg100.txt +miscreant. pg3200.txt +miscreants pg3200.txt +misdeeds, pg100.txt +misdemeanor pg3200.txt +misdemeanor, pg3200.txt +misdemeanor--mere pg3200.txt +misdoubt pg100.txt +misdoubt; pg100.txt +misenum pg100.txt +misenum. pg100.txt +miser pg3200.txt +miserable pg31100.txt, pg3200.txt, pg100.txt +miserable! pg31100.txt, pg100.txt +miserable!" pg3200.txt +miserable, pg31100.txt, pg3200.txt +miserable. pg31100.txt, pg3200.txt, pg100.txt +miserable." pg31100.txt +miserable;--i pg31100.txt +miserable? pg100.txt +miserableness. pg3200.txt +miserably pg3200.txt +misericorde) pg3200.txt +miseries pg3200.txt, pg100.txt +miseries, pg3200.txt, pg100.txt +miseries. pg3200.txt, pg100.txt +miseries: pg100.txt +misery pg31100.txt, pg3200.txt, pg100.txt +misery! pg100.txt +misery, pg31100.txt, pg3200.txt, pg100.txt +misery- pg100.txt +misery--so pg3200.txt +misery. pg31100.txt, pg3200.txt, pg100.txt +misery." pg31100.txt, pg3200.txt +misery; pg3200.txt +misfortune pg31100.txt, pg3200.txt +misfortune, pg31100.txt, pg3200.txt +misfortune--or pg31100.txt +misfortune. pg31100.txt, pg3200.txt +misfortune." pg3200.txt +misfortunes pg31100.txt, pg3200.txt +misfortunes?" pg31100.txt +misgives pg100.txt +misgives. 
pg100.txt +misgiving pg3200.txt +misgiving. pg3200.txt +misgivings pg3200.txt +misgivings, pg3200.txt +misgivings. pg3200.txt +misgovernment. pg100.txt +misguided, pg3200.txt +mishandled pg3200.txt +mishap pg100.txt +mishap! pg100.txt +mishaps pg3200.txt +mishaps! pg100.txt +mishaps. pg100.txt +mishe-nama." pg3200.txt +misheard; pg100.txt +misinformation pg3200.txt +misinformation; pg3200.txt +misinformed, pg31100.txt +misinterpreted: pg3200.txt +mislaid pg3200.txt +mislead pg3200.txt +mislead. pg3200.txt +misleaders, pg100.txt +misled pg31100.txt, pg3200.txt +misled, pg100.txt +misled. pg31100.txt +misled; pg100.txt +mislike pg100.txt +mislike, pg100.txt +mismanagement pg31100.txt +misnaming pg3200.txt +misnomer. pg3200.txt +misplac'd. pg100.txt +misplaced pg31100.txt, pg3200.txt +misplaced, pg100.txt +misplaced.] pg3200.txt +misplaces? pg100.txt +misprint pg3200.txt +misprision pg3200.txt, pg100.txt +misprision! pg100.txt +misprizing pg100.txt +mispronouncing pg3200.txt +misreports. pg3200.txt +misrepresented." pg31100.txt +miss pg31100.txt, pg3200.txt, pg100.txt +miss" pg3200.txt +miss'd pg100.txt +miss'd. pg100.txt +miss'd? pg100.txt +miss'n." pg3200.txt +miss't. pg100.txt +miss, pg100.txt +miss," pg3200.txt +miss-- pg31100.txt +miss--" pg3200.txt +miss--er--" pg3200.txt +miss. pg31100.txt, pg3200.txt, pg100.txt +missed pg31100.txt, pg3200.txt +missed, pg31100.txt, pg3200.txt +missed. pg31100.txt +missed." pg31100.txt, pg3200.txt +missed: pg100.txt +missed; pg3200.txt +misses pg3200.txt +misses, pg31100.txt +missiles.) pg3200.txt +missing pg31100.txt, pg3200.txt +missing, pg3200.txt, pg100.txt +missing. pg3200.txt, pg100.txt +missing.) pg3200.txt +missing.] pg3200.txt +missing; pg3200.txt +missingly pg100.txt +mission pg31100.txt, pg3200.txt, pg100.txt +mission, pg3200.txt +mission--overlooked pg3200.txt +mission. pg3200.txt +mission? pg3200.txt +missionaries pg3200.txt +missionaries, pg3200.txt +missionaries. pg3200.txt +missionaries; pg3200.txt +missionary pg31100.txt, pg3200.txt +missionary's pg3200.txt +missionary, pg3200.txt +missionary. pg3200.txt +missionary?" pg3200.txt +missionarying pg3200.txt +missionarying, pg3200.txt +missionarying--for pg3200.txt +missions, pg3200.txt +missis--" pg3200.txt +mississippi pg3200.txt +mississippi!' pg3200.txt +mississippi, pg3200.txt +mississippi-- pg3200.txt +mississippi--at pg3200.txt +mississippi. pg3200.txt +mississippi.' pg3200.txt +mississippi..... pg3200.txt +mississippi: pg3200.txt +mississippi; pg3200.txt +missives pg100.txt +missives. pg3200.txt +missouri pg3200.txt +missouri!" pg3200.txt +missouri, pg3200.txt +missouri,) pg3200.txt +missouri--a pg3200.txt +missouri--was pg3200.txt +missouri. pg3200.txt +missouri." pg3200.txt +missouri.' pg3200.txt +missouri: pg3200.txt +missouri;' pg3200.txt +missourian pg3200.txt +missourian--and pg3200.txt +misspelled pg3200.txt +misstatements pg3200.txt +misstatements, pg3200.txt +mist pg3200.txt +mist, pg3200.txt +mist; pg3200.txt +mistak'st, pg100.txt +mistake pg31100.txt, pg3200.txt, pg100.txt +mistake!" pg31100.txt, pg3200.txt +mistake, pg31100.txt, pg3200.txt +mistake--flawless pg3200.txt +mistake--for pg31100.txt +mistake--it pg3200.txt +mistake. pg31100.txt, pg3200.txt, pg100.txt +mistake." pg3200.txt +mistake; pg31100.txt, pg3200.txt, pg100.txt +mistaken pg31100.txt, pg3200.txt +mistaken, pg31100.txt, pg3200.txt +mistaken--more pg31100.txt +mistaken. pg31100.txt, pg3200.txt +mistaken." pg31100.txt +mistaken."-- pg31100.txt +mistaken; pg31100.txt, pg3200.txt, pg100.txt +mistaken? 
pg3200.txt +mistakenly pg3200.txt +mistakes pg31100.txt, pg3200.txt +mistakes, pg3200.txt +mistakes," pg31100.txt +mistakes. pg3200.txt +mistakes." pg3200.txt +mistakes; pg3200.txt +mistaking pg3200.txt, pg100.txt +mistaking, pg100.txt +mistaking. pg100.txt +mistaking; pg100.txt +misteaching. pg3200.txt +mister?" pg3200.txt +misthought pg100.txt +mistletoe; pg100.txt +mistook pg3200.txt, pg100.txt +mistook. pg100.txt +mistook; pg100.txt +mistress pg31100.txt, pg3200.txt, pg100.txt +mistress! pg31100.txt, pg100.txt +mistress, pg31100.txt, pg100.txt +mistress- pg100.txt +mistress. pg31100.txt, pg3200.txt, pg100.txt +mistress." pg31100.txt +mistress.' pg100.txt +mistress; pg100.txt +mistress? pg100.txt +mistresses pg31100.txt, pg3200.txt +mistresses! pg100.txt +mistresss pg100.txt +mistrust pg31100.txt, pg100.txt +mistrust. pg100.txt +mistrust; pg100.txt +mistrusted pg3200.txt +mists pg3200.txt, pg100.txt +mists, pg3200.txt +misty pg3200.txt +misunderstand pg31100.txt +misunderstanding pg3200.txt +misunderstandings pg3200.txt +misunderstood pg31100.txt +misunderstood, pg31100.txt +misunderstood,' pg31100.txt +misunderstood. pg31100.txt +misuse pg100.txt +misuse, pg100.txt +misuse? pg100.txt +misuses pg100.txt +mitchell, pg3200.txt +mitchell. pg3200.txt +mitchells, pg31100.txt +mite pg3200.txt +mite--that pg3200.txt +mites pg3200.txt +mites, pg100.txt +mithoo pg3200.txt +mitigate pg3200.txt +mitigation pg100.txt +mitre pg3200.txt +mittagong pg3200.txt +mittagong! pg3200.txt +mix pg31100.txt, pg3200.txt +mix'd! pg100.txt +mix'd. pg100.txt +mixed pg31100.txt, pg3200.txt +mixed, pg3200.txt +mixed-up pg3200.txt +mixed. pg3200.txt +mixed." pg3200.txt +mixed; pg3200.txt +mixes pg3200.txt +mixing pg31100.txt, pg3200.txt +mixture pg31100.txt, pg3200.txt +mixture; pg3200.txt +mo' pg3200.txt +mo'!" pg3200.txt +mo', pg3200.txt +mo'--en' pg3200.txt +mo'." pg3200.txt +mo. pg3200.txt +mo." pg3200.txt +mo.: pg3200.txt +moa pg3200.txt +moa. pg3200.txt +moab pg3200.txt +moan pg31100.txt +moan! pg100.txt +moan, pg3200.txt, pg100.txt +moan- pg100.txt +moan. pg3200.txt, pg100.txt +moan." pg3200.txt +moan; pg100.txt +moan? pg100.txt +moaned. pg3200.txt +moaning pg3200.txt +moanings pg3200.txt +moans, pg100.txt +moans. pg100.txt +moat pg3200.txt +moat. pg3200.txt +mob pg3200.txt +mob!" pg3200.txt +mob, pg3200.txt +mob. pg3200.txt +mob; pg3200.txt +mob?" pg3200.txt +mobbed pg3200.txt +mobile pg3200.txt +moccasins, pg3200.txt +moccasins--a pg3200.txt +mock pg31100.txt, pg3200.txt, pg100.txt +mock'd pg100.txt +mock'd, pg100.txt +mock'd. pg100.txt +mock'd; pg100.txt +mock, pg100.txt +mock-heroics pg3200.txt +mock. pg3200.txt +mocked pg3200.txt +mockeries! pg100.txt +mockers, pg100.txt +mockery pg100.txt +mockery. pg3200.txt, pg100.txt +mockery? pg100.txt +mocking pg3200.txt, pg100.txt +mocking--tomb pg3200.txt +mocking. pg100.txt +mockingly-- pg3200.txt +mocks pg100.txt +mocks! pg100.txt +mocks, pg100.txt +mockwater. pg100.txt +mode pg31100.txt, pg3200.txt +mode. pg3200.txt +model pg31100.txt, pg3200.txt, pg100.txt +model, pg100.txt +model. pg31100.txt +model; pg100.txt +moderate pg31100.txt, pg3200.txt +moderate, pg31100.txt, pg3200.txt +moderate. pg100.txt +moderate." pg31100.txt +moderated pg3200.txt +moderated. pg3200.txt +moderately pg3200.txt +moderately. pg31100.txt +moderation pg31100.txt, pg3200.txt +moderation, pg31100.txt, pg3200.txt +moderation: pg3200.txt +moderation? pg100.txt +modern pg31100.txt, pg3200.txt +modern, pg3200.txt +modern. pg3200.txt +modern." 
pg3200.txt +modernised, pg31100.txt +modernly: pg3200.txt +moderns; pg3200.txt +modest pg31100.txt, pg3200.txt, pg100.txt +modest, pg31100.txt, pg3200.txt, pg100.txt +modest. pg31100.txt, pg3200.txt, pg100.txt +modester pg3200.txt +modestest pg3200.txt +modesties pg100.txt +modesties, pg100.txt +modestly pg3200.txt, pg100.txt +modestly, pg100.txt +modestly. pg31100.txt +modesty pg31100.txt, pg3200.txt, pg100.txt +modesty, pg31100.txt, pg100.txt +modesty-- pg3200.txt +modesty--moses pg3200.txt +modesty. pg3200.txt, pg100.txt +modesty." pg31100.txt +modesty: pg3200.txt +modesty; pg100.txt +modesty?" pg31100.txt +modification pg3200.txt +modification, pg3200.txt +modification--it pg3200.txt +modification?' pg3200.txt +modifications pg3200.txt +modifications, pg3200.txt +modified pg3200.txt, pg100.txt +modified. pg3200.txt +modify pg3200.txt +modifying pg3200.txt +module pg100.txt +moe pg100.txt +moe! pg100.txt +moe, pg100.txt +moffett pg3200.txt +moffett. pg3200.txt +moffett: pg3200.txt +mogul pg3200.txt +mohammedan pg3200.txt +mohammedan, pg3200.txt +mohammedanism. pg3200.txt +mohammedans pg3200.txt +mohawk." pg3200.txt +moi! pg100.txt +moi- pg100.txt +moi. pg100.txt +moiety. pg3200.txt, pg100.txt +moiety? pg100.txt +moisten pg3200.txt +moistened pg3200.txt +moistening pg3200.txt +moisture pg3200.txt, pg100.txt +moisture. pg3200.txt +molasses pg3200.txt +molasses! pg3200.txt +molasses. pg3200.txt +mold pg3200.txt +mold. pg3200.txt +moldering pg3200.txt +moldy pg3200.txt +mole pg3200.txt, pg100.txt +molehill pg100.txt +moles. pg3200.txt +moles; pg3200.txt +molest pg3200.txt +molest." pg3200.txt +molested pg3200.txt +molested. pg3200.txt +molested." pg3200.txt +mollasses, pg3200.txt +mollie pg3200.txt +mollie, pg3200.txt +mollie. pg3200.txt +moloch; pg3200.txt +moment pg31100.txt, pg3200.txt, pg100.txt +moment! pg31100.txt, pg3200.txt +moment!" pg3200.txt +moment" pg3200.txt +moment's pg31100.txt, pg3200.txt +moment, pg31100.txt, pg3200.txt, pg100.txt +moment-- pg31100.txt, pg3200.txt +moment--dog pg3200.txt +moment--is pg3200.txt +moment--only pg3200.txt +moment--then--] pg3200.txt +moment--there pg31100.txt +moment--there; pg3200.txt +moment--yet pg3200.txt +moment-bothering pg3200.txt +moment. pg31100.txt, pg3200.txt, pg100.txt +moment." pg31100.txt, pg3200.txt +moment.] pg3200.txt +moment: pg31100.txt, pg3200.txt +moment:) pg3200.txt +moment; pg31100.txt, pg3200.txt, pg100.txt +moment?" pg31100.txt, pg3200.txt +momentarily pg3200.txt +momentary pg31100.txt, pg3200.txt, pg100.txt +momentary, pg3200.txt +momentous pg3200.txt +moments pg31100.txt, pg3200.txt +moments' pg31100.txt, pg3200.txt +moments, pg31100.txt, pg3200.txt +moments. pg31100.txt, pg3200.txt +moments." pg31100.txt +moments; pg3200.txt +moments?' pg3200.txt +momentum, pg3200.txt +momma pg3200.txt +mon pg100.txt +monarch pg3200.txt +monarch! pg100.txt +monarch's. pg3200.txt +monarch, pg3200.txt, pg100.txt +monarch-- pg3200.txt +monarch. pg3200.txt +monarch." pg3200.txt +monarch; pg100.txt +monarch? pg3200.txt, pg100.txt +monarchies pg3200.txt +monarchies, pg100.txt +monarchists, pg3200.txt +monarchs pg3200.txt, pg100.txt +monarchs, pg3200.txt +monarchs--the pg3200.txt +monarchy pg3200.txt +monarchy, pg3200.txt +monarchy-see pg100.txt +monarchy. pg100.txt +monarchy; pg3200.txt +monasteries pg3200.txt +monastery pg100.txt +monastery. pg3200.txt +monastery; pg100.txt +moncrieff, pg3200.txt +monday pg31100.txt, pg3200.txt +monday, pg31100.txt +monday. pg3200.txt +monday." pg31100.txt +monday; pg3200.txt +monday?" 
pg3200.txt +mondays, pg3200.txt +mondays; pg3200.txt +money pg31100.txt, pg3200.txt, pg100.txt +money! pg3200.txt, pg100.txt +money!" pg3200.txt +money's pg3200.txt +money's--" pg3200.txt +money) pg3200.txt +money, pg31100.txt, pg3200.txt, pg100.txt +money-- pg3200.txt +money--and pg3200.txt +money--for pg3200.txt +money-believing pg3200.txt +money-changers pg3200.txt +money-grubbing pg3200.txt +money-making pg3200.txt +money-making, pg3200.txt +money-necessity pg3200.txt +money-reward pg3200.txt +money-sources, pg3200.txt +money. pg31100.txt, pg3200.txt, pg100.txt +money." pg31100.txt, pg3200.txt +money.' pg3200.txt +money... pg31100.txt +money.] pg3200.txt, pg100.txt +money; pg31100.txt, pg3200.txt, pg100.txt +money? pg31100.txt, pg3200.txt, pg100.txt +money?" pg3200.txt +money] pg100.txt +moneys'? pg100.txt +moneys, pg100.txt +mongoose pg3200.txt +mongoose, pg3200.txt +mongrel pg3200.txt +mongrel. pg3200.txt +mongrel." pg3200.txt +mongrel? pg100.txt +mongrels pg3200.txt +monitor pg3200.txt +monitor? pg3200.txt +monitors, pg3200.txt +monk pg3200.txt +monk, pg3200.txt +monk. pg3200.txt +monk: pg3200.txt +monk; pg100.txt +monkey pg3200.txt +monkey!' pg3200.txt +monkey, pg3200.txt, pg100.txt +monkey-wrench pg3200.txt +monkey. pg3200.txt, pg100.txt +monkeys! pg100.txt +monkeys, pg100.txt +monkeys. pg100.txt +monkeys; pg3200.txt +monkish pg3200.txt +monks pg3200.txt +monks, pg3200.txt +monks. pg3200.txt, pg100.txt +monks; pg3200.txt +monmouth, pg3200.txt, pg100.txt +monmouth. pg100.txt +monmouth; pg100.txt +mono, pg3200.txt +monogram--mine! pg3200.txt +monolith pg3200.txt +monomaniac, pg3200.txt +monopolise pg3200.txt +monopolize pg3200.txt +monopoly pg3200.txt +monopoly, pg3200.txt +monopoly. pg3200.txt +monosyllabic pg3200.txt +monosyllable. pg31100.txt +monosyllables: pg3200.txt +monotonies pg3200.txt +monotonous pg3200.txt +monotonous!" pg3200.txt +monotonous. pg3200.txt +monotonous." pg3200.txt +monotonous; pg3200.txt +monotonously pg3200.txt +monotonousness pg3200.txt +monotony pg3200.txt +monotony. pg3200.txt +monsieur pg3200.txt, pg100.txt +monsieur!" pg3200.txt +monsieur," pg3200.txt +monsieur. pg100.txt +monsieur." pg3200.txt +monsieur? pg100.txt +monsieurs pg100.txt +monster pg3200.txt, pg100.txt +monster! pg100.txt +monster's pg100.txt +monster, pg3200.txt, pg100.txt +monster-headed pg3200.txt +monster. pg3200.txt, pg100.txt +monster? pg100.txt +monsters pg3200.txt +monsters. pg100.txt +monsters? pg100.txt +monstrosities, pg3200.txt +monstrous pg31100.txt, pg3200.txt, pg100.txt +monstrous! pg100.txt +monstrous. pg3200.txt +monstrousness pg100.txt +mont pg3200.txt +montage, pg100.txt +montague pg3200.txt, pg100.txt +montague! pg100.txt +montague's. pg100.txt +montague, pg3200.txt, pg100.txt +montague. pg100.txt +montague; pg100.txt +montague? pg100.txt +montagues pg3200.txt +montagues! pg100.txt +montagues. pg100.txt +montaigne's pg3200.txt +montaigne, pg3200.txt +montaine" pg3200.txt +montana pg3200.txt +montano pg100.txt +montano, pg100.txt +montant. pg100.txt +montanvert. pg3200.txt +montanvert." pg3200.txt +monte pg3200.txt +montferrat? pg100.txt +montgomery pg100.txt +montgomery, pg3200.txt, pg100.txt +month pg31100.txt, pg3200.txt, pg100.txt +month! pg3200.txt +month's pg100.txt +month). pg3200.txt +month, pg31100.txt, pg3200.txt, pg100.txt +month- pg100.txt +month--" pg3200.txt +month--that pg3200.txt +month. pg3200.txt, pg100.txt +month." pg31100.txt, pg3200.txt +month.) pg3200.txt +month; pg3200.txt +month? pg100.txt +month?" pg3200.txt +monthlies. 
pg3200.txt +monthly pg3200.txt +monthly, pg3200.txt +monthly-nurses pg3200.txt +monthly. pg3200.txt +monthly: pg3200.txt +months pg31100.txt, pg3200.txt, pg100.txt +months! pg3200.txt +months!" pg31100.txt +months' pg31100.txt +months, pg31100.txt, pg3200.txt +months--' pg3200.txt +months--forgot pg3200.txt +months--he pg31100.txt +months--it pg3200.txt +months. pg31100.txt, pg3200.txt, pg100.txt +months." pg31100.txt, pg3200.txt +months.] pg3200.txt +months: pg3200.txt +months; pg31100.txt, pg3200.txt +months? pg3200.txt +months?" pg31100.txt, pg3200.txt +montjoy pg100.txt +montjoy, pg100.txt +montjoy. pg100.txt +montreal pg3200.txt +montreal: pg3200.txt +monument pg31100.txt, pg3200.txt, pg100.txt +monument! pg100.txt +monument!" pg3200.txt +monument, pg3200.txt, pg100.txt +monument--monument pg3200.txt +monument--what pg3200.txt +monument. pg3200.txt, pg100.txt +monument." pg3200.txt +monument; pg3200.txt, pg100.txt +monumental pg3200.txt +monumental. pg3200.txt +monuments pg3200.txt, pg100.txt +monuments, pg3200.txt +monuments. pg100.txt +monuments; pg100.txt +mood pg31100.txt, pg3200.txt, pg100.txt +mood! pg100.txt +mood, pg3200.txt, pg100.txt +mood- pg100.txt +mood. pg3200.txt +mood." pg3200.txt +mood: pg100.txt +mood; pg3200.txt +moods pg3200.txt +moods, pg100.txt +moods. pg3200.txt, pg100.txt +moods; pg3200.txt, pg100.txt +moody pg3200.txt, pg100.txt +moody, pg3200.txt +moody. pg100.txt +moody? pg100.txt +moon pg3200.txt, pg100.txt +moon! pg3200.txt, pg100.txt +moon, pg3200.txt, pg100.txt +moon- pg100.txt +moon-- pg3200.txt +moon--in pg3200.txt +moon-calf! pg100.txt +moon-calf. pg100.txt +moon-faced pg3200.txt +moon-light. pg3200.txt +moon. pg3200.txt, pg100.txt +moon; pg100.txt +moon? pg100.txt +moon?" pg3200.txt +mooncalves!" pg3200.txt +mooning pg3200.txt +moonlight pg3200.txt +moonlight! pg3200.txt +moonlight, pg3200.txt +moonlight. pg31100.txt, pg3200.txt, pg100.txt +moonlit pg3200.txt +moons! pg100.txt +moons, pg3200.txt +moonshine pg100.txt +moonshine. pg3200.txt, pg100.txt +moonshine] pg100.txt +moonshiners. pg3200.txt +moonshines pg100.txt +moor pg100.txt +moor!" pg100.txt +moor, pg100.txt +moor- pg100.txt +moor. pg100.txt +moor; pg100.txt +moor? pg100.txt +moore pg3200.txt +moore--is pg3200.txt +moore: pg3200.txt +moored pg3200.txt +moored; pg3200.txt +mooring pg3200.txt +moorish pg3200.txt +moorooroo pg3200.txt +moors pg3200.txt, pg100.txt +mope pg3200.txt +mope. pg100.txt +moping pg3200.txt +mopping pg100.txt +mopsa pg100.txt +moque pg3200.txt +mor- pg3200.txt +moraine pg3200.txt +moraine, pg3200.txt +moraines pg3200.txt +moral pg31100.txt, pg3200.txt, pg100.txt +moral--honesty, pg3200.txt +moral-honesty, pg3200.txt +moral. pg3200.txt, pg100.txt +moral; pg3200.txt +moralist. pg3200.txt +moralists pg31100.txt +moralists, pg31100.txt +moralities pg3200.txt +moralities. pg3200.txt +moralities; pg3200.txt +morality pg3200.txt +morality, pg31100.txt, pg3200.txt +morality. pg31100.txt, pg3200.txt +morality?" pg3200.txt +moralize pg3200.txt +morally pg3200.txt +morally, pg3200.txt +morally. pg3200.txt +morals pg3200.txt +morals, pg31100.txt, pg3200.txt +morals,--these pg3200.txt +morals--" pg3200.txt +morals. pg3200.txt +morals; pg3200.txt +morals? pg3200.txt +morasses pg3200.txt +moravians, pg3200.txt +mordred pg3200.txt +mordred; pg3200.txt +more! pg3200.txt, pg100.txt +more!" pg31100.txt, pg3200.txt +more'n pg3200.txt +more). 
pg3200.txt
+more, pg31100.txt, pg3200.txt, pg100.txt
+more," pg3200.txt
+more- pg100.txt
+more--" pg3200.txt
+more--although pg3200.txt
+more--and pg3200.txt
[several thousand further added inverted-index entries, one per line, for the tokens "more--er--important" through "occurrence,": each line has the form +token followed by the comma-separated list of the source files (pg31100.txt, pg3200.txt, pg100.txt) in which that token occurs]
+occurred; pg3200.txt
+occurred? pg3200.txt
+occurrence pg31100.txt, pg3200.txt
+occurrence, pg31100.txt, pg3200.txt
+occurrence.
pg3200.txt +occurrence: pg3200.txt +occurrences pg31100.txt, pg3200.txt +occurring pg3200.txt +occurring, pg31100.txt +occurs pg3200.txt +occurs: pg3200.txt +occurs; pg3200.txt +ocean pg3200.txt, pg100.txt +ocean!" pg3200.txt +ocean, pg3200.txt, pg100.txt +ocean--and pg3200.txt +ocean. pg3200.txt, pg100.txt +ocean." pg3200.txt +ocean; pg100.txt +oceans pg3200.txt +oceans. pg100.txt +ochterlony pg3200.txt +ochterlony. pg3200.txt +octagon pg31100.txt +octavia pg100.txt +octavia, pg100.txt +octavia. pg100.txt +octavia? pg100.txt +octavia] pg100.txt +octavius pg100.txt +octavius, pg100.txt +octavius. pg100.txt +octavo, pg3200.txt +october pg3200.txt +october, pg3200.txt +october,--but pg31100.txt +october--" pg3200.txt +october. pg31100.txt, pg3200.txt +october." pg31100.txt +octogenarian pg3200.txt +odd pg31100.txt, pg3200.txt +odd! pg31100.txt +odd!" pg31100.txt +odd, pg31100.txt, pg3200.txt, pg100.txt +odd-tempered pg31100.txt +odd. pg3200.txt, pg100.txt +odd." pg31100.txt, pg3200.txt +odd; pg3200.txt +oddest pg31100.txt, pg3200.txt +oddly." pg31100.txt +odds pg3200.txt, pg100.txt +odds, pg3200.txt, pg100.txt +odds. pg100.txt +odds.' pg100.txt +odds: pg100.txt +odds; pg100.txt +odes pg100.txt +odessa pg3200.txt +odious pg3200.txt, pg100.txt +odious, pg3200.txt, pg100.txt +odious. pg31100.txt, pg3200.txt +odious; pg31100.txt +odor pg3200.txt +odorless. pg3200.txt +odorous! pg100.txt +odors pg3200.txt +odour pg3200.txt +odsbodikins, pg3200.txt +oeillades; pg100.txt +oesophagus pg3200.txt +of! pg31100.txt, pg3200.txt, pg100.txt +of!" pg3200.txt +of"! pg3200.txt +of't. pg100.txt +of't: pg100.txt +of) pg3200.txt +of, pg31100.txt, pg3200.txt, pg100.txt +of," pg3200.txt +of- pg100.txt +of-- pg31100.txt, pg3200.txt +of--" pg31100.txt, pg3200.txt +of--$20.00 pg3200.txt +of--' pg3200.txt +of----" pg3200.txt +of--and pg31100.txt, pg3200.txt +of--er--" pg3200.txt +of--er--a pg3200.txt +of--inferior pg31100.txt +of--johannesburg--the pg3200.txt +of--still, pg3200.txt +of--that pg3200.txt +of--these pg3200.txt +of--to pg3200.txt +of--what? pg3200.txt +of-to-day. pg3200.txt +of. pg31100.txt, pg3200.txt, pg100.txt +of." pg31100.txt, pg3200.txt +of.' pg31100.txt, pg3200.txt +of.--a pg31100.txt +of.--see pg31100.txt +of.--the pg31100.txt +of: pg3200.txt +of; pg31100.txt, pg3200.txt, pg100.txt +of;--but pg31100.txt +of? pg31100.txt, pg3200.txt, pg100.txt +of?" pg31100.txt, pg3200.txt +of?' pg3200.txt +of?- pg100.txt +of?--what pg31100.txt +ofephesus pg100.txt +off pg31100.txt, pg3200.txt, pg100.txt +off! pg31100.txt, pg3200.txt, pg100.txt +off!" pg31100.txt, pg3200.txt +off!--and pg31100.txt +off" pg3200.txt +off' pg3200.txt +off'n pg3200.txt +off'ring! pg100.txt +off's pg100.txt +off, pg31100.txt, pg3200.txt, pg100.txt +off- pg100.txt +off--you've pg3200.txt +off-watch pg3200.txt +off. pg31100.txt, pg3200.txt, pg100.txt +off." pg31100.txt, pg3200.txt +off.' pg3200.txt +off.] pg100.txt +off; pg31100.txt, pg3200.txt, pg100.txt +off? pg3200.txt, pg100.txt +off?" pg31100.txt, pg3200.txt +off] pg100.txt +offal pg3200.txt +offenbaren. pg3200.txt +offence pg31100.txt, pg3200.txt, pg100.txt +offence! pg100.txt +offence!" pg31100.txt +offence, pg3200.txt, pg100.txt +offence,) pg3200.txt +offence- pg100.txt +offence. pg31100.txt, pg3200.txt, pg100.txt +offence." pg31100.txt, pg3200.txt +offence.--you pg31100.txt +offence: pg100.txt +offence; pg3200.txt, pg100.txt +offence? pg3200.txt, pg100.txt +offence?" pg31100.txt, pg3200.txt +offences pg3200.txt, pg100.txt +offences, pg100.txt +offences. 
pg3200.txt, pg100.txt +offend pg31100.txt, pg3200.txt, pg100.txt +offend, pg100.txt +offend. pg31100.txt, pg100.txt +offended pg31100.txt, pg3200.txt, pg100.txt +offended, pg3200.txt, pg100.txt +offended. pg3200.txt, pg100.txt +offended; pg3200.txt, pg100.txt +offended? pg100.txt +offender pg3200.txt +offender, pg100.txt +offender. pg100.txt +offenders pg3200.txt +offenders. pg3200.txt +offending pg31100.txt, pg3200.txt, pg100.txt +offending; pg100.txt +offends pg100.txt +offends; pg100.txt +offense pg3200.txt, pg100.txt +offense, pg3200.txt, pg100.txt +offense. pg31100.txt, pg3200.txt, pg100.txt +offense; pg3200.txt +offense?" pg3200.txt +offenses pg3200.txt +offensive pg31100.txt, pg3200.txt +offensive, pg3200.txt +offensive. pg3200.txt, pg100.txt +offensive; pg3200.txt +offensively pg3200.txt +offensiveness. pg3200.txt +offer pg31100.txt, pg3200.txt, pg100.txt +offer'd pg100.txt +offer'd, pg100.txt +offer'd. pg100.txt +offer, pg31100.txt, pg3200.txt, pg100.txt +offer- pg100.txt +offer. pg31100.txt, pg3200.txt, pg100.txt +offer." pg3200.txt +offer: pg3200.txt +offer; pg3200.txt, pg100.txt +offer? pg3200.txt +offer?" pg3200.txt +offered pg31100.txt, pg3200.txt, pg100.txt +offered, pg31100.txt, pg3200.txt +offered--"ah! pg31100.txt +offered. pg31100.txt, pg3200.txt +offered." pg3200.txt +offered?" pg3200.txt +offering pg31100.txt, pg3200.txt +offering." pg31100.txt +offerings pg3200.txt +offerings, pg3200.txt +offers pg31100.txt, pg3200.txt +offers, pg100.txt +offers--or pg3200.txt +offers. pg3200.txt +offers." pg31100.txt +offers: pg3200.txt +offers; pg3200.txt, pg100.txt +office pg31100.txt, pg3200.txt, pg100.txt +office! pg3200.txt, pg100.txt +office!" pg3200.txt +office"--and pg3200.txt +office's, pg3200.txt +office, pg31100.txt, pg3200.txt, pg100.txt +office," pg3200.txt +office--" pg3200.txt +office--forever. pg3200.txt +office. pg3200.txt, pg100.txt +office." pg31100.txt, pg3200.txt +office: pg3200.txt +office; pg3200.txt, pg100.txt +office? pg3200.txt, pg100.txt +officer pg31100.txt, pg3200.txt, pg100.txt +officer! pg100.txt +officer, pg3200.txt, pg100.txt +officer--a pg3200.txt +officer. pg3200.txt, pg100.txt +officer." pg3200.txt, pg100.txt +officer.] pg100.txt +officer; pg100.txt +officer? pg100.txt +officered pg3200.txt +officered." pg3200.txt +officers pg31100.txt, pg3200.txt, pg100.txt +officers' pg3200.txt +officers, pg31100.txt, pg3200.txt, pg100.txt +officers--inquiring pg3200.txt +officers--instructions pg3200.txt +officers-at-arms pg100.txt +officers. pg31100.txt, pg3200.txt, pg100.txt +officers." pg3200.txt +officers.] pg3200.txt +officers; pg31100.txt, pg3200.txt, pg100.txt +offices pg31100.txt, pg3200.txt, pg100.txt +offices, pg31100.txt, pg100.txt +offices. pg31100.txt, pg3200.txt, pg100.txt +offices? pg100.txt +official pg31100.txt, pg3200.txt, pg100.txt +official, pg3200.txt +officially pg3200.txt +officially, pg3200.txt +officially. pg3200.txt +officials pg3200.txt +officials, pg3200.txt +officials." pg3200.txt +officiate pg3200.txt +officiating pg31100.txt +officious pg31100.txt, pg100.txt +officious?" pg31100.txt +officiousness pg31100.txt +offish pg3200.txt +offset pg3200.txt +offspring pg3200.txt +offspring, pg3200.txt +offspring. pg3200.txt +offstart pg3200.txt +oft pg100.txt +oft, pg100.txt +oft- pg100.txt +oft-times pg3200.txt, pg100.txt +oft-times--'" pg3200.txt +oft. pg100.txt +oft; pg100.txt +oft? pg100.txt +often pg31100.txt, pg3200.txt, pg100.txt +often, pg31100.txt, pg3200.txt, pg100.txt +often--but pg3200.txt +often-expressed pg31100.txt +often. 
pg31100.txt, pg3200.txt +often." pg31100.txt +often.' pg3200.txt +often; pg31100.txt +often?" pg31100.txt, pg3200.txt +oftener pg31100.txt, pg3200.txt +oftener. pg3200.txt +oftener; pg31100.txt +oftenest pg31100.txt +oftenest. pg3200.txt +oful pg3200.txt +ogdens pg3200.txt +ogre pg3200.txt +ogre's pg3200.txt +ogres pg3200.txt +ogw. pg3200.txt +oh pg31100.txt, pg3200.txt +oh! pg31100.txt, pg3200.txt, pg100.txt +oh!--you pg31100.txt +oh, pg31100.txt, pg3200.txt +ohio pg3200.txt +ohio, pg3200.txt +ohio: pg3200.txt +oho! pg3200.txt +oil pg3200.txt +oil"-- pg3200.txt +oil, pg3200.txt +oil--" pg3200.txt +oil-cloth pg3200.txt +oil-painting pg3200.txt +oil-pictures pg3200.txt +oil. pg3200.txt +oil." pg3200.txt +oil; pg3200.txt, pg100.txt +oiled. pg31100.txt +oiling pg3200.txt +oils pg3200.txt +oils--and pg3200.txt +oise, pg3200.txt +oklahoma, pg3200.txt +old! pg3200.txt, pg100.txt +old!" pg31100.txt +old) pg3200.txt +old, pg31100.txt, pg3200.txt, pg100.txt +old- pg3200.txt +old--and--and-- pg3200.txt +old--how pg31100.txt +old--manifestly pg3200.txt +old--we pg3200.txt +old-fashioned pg3200.txt +old-fashioned, pg3200.txt +old-fashioned. pg3200.txt +old-time pg3200.txt +old. pg31100.txt, pg3200.txt, pg100.txt +old." pg31100.txt, pg3200.txt +old._] pg31100.txt +old: pg100.txt +old; pg3200.txt, pg100.txt +old? pg3200.txt, pg100.txt +old?" pg31100.txt, pg3200.txt +olde pg3200.txt +olden pg3200.txt +older pg31100.txt, pg3200.txt, pg100.txt +older, pg31100.txt, pg3200.txt, pg100.txt +older. pg3200.txt +older." pg31100.txt +older: pg31100.txt +oldest pg3200.txt +ole pg3200.txt +ole, pg3200.txt +ole----" pg3200.txt +oleander pg3200.txt +oliphant pg3200.txt +olive- pg3200.txt +olive-oil pg3200.txt +oliver pg3200.txt, pg100.txt +oliver, pg100.txt +oliver. pg100.txt +olives; pg3200.txt +olivia pg100.txt +olivia's pg100.txt +olivia- pg100.txt +olivia. pg100.txt +olivia? pg100.txt +ollendorff pg3200.txt +ollendorff, pg3200.txt +ollendorff. pg3200.txt +olympus, pg100.txt +olympus. pg100.txt +olympus? pg100.txt +omaha, pg3200.txt +oman pg100.txt +oman. pg100.txt +omen. pg3200.txt +omens pg3200.txt +ominous pg3200.txt +ominous. pg100.txt +omission pg31100.txt +omission, pg3200.txt +omission. pg3200.txt +omissions pg3200.txt +omissions, pg31100.txt +omit pg31100.txt, pg100.txt +omit'st pg100.txt +omit. pg3200.txt, pg100.txt +omitted pg31100.txt, pg3200.txt, pg100.txt +omitted, pg100.txt +omitted. pg3200.txt +omitting pg31100.txt, pg3200.txt +omnes pg100.txt +omnes. pg100.txt +omnibus, pg3200.txt +omnibuses pg3200.txt +omnipotence, pg3200.txt +omnipotent, pg100.txt +omnipotent; pg3200.txt +on! pg3200.txt, pg100.txt +on!" pg31100.txt, pg3200.txt +on!"] pg3200.txt +on!--so pg31100.txt +on" pg31100.txt +on's pg100.txt +on't pg100.txt +on't! pg100.txt +on't, pg100.txt +on't. pg100.txt +on't; pg100.txt +on't? pg100.txt +on'y pg3200.txt +on), pg100.txt +on, pg31100.txt, pg3200.txt, pg100.txt +on," pg3200.txt +on,) pg3200.txt +on-- pg31100.txt, pg3200.txt +on--" pg3200.txt +on---- pg3200.txt +on--and pg3200.txt +on--at pg3200.txt +on--flaunting pg3200.txt +on--for pg3200.txt +on--in pg3200.txt +on--mine pg100.txt +on--no, pg3200.txt +on--what pg31100.txt +on-looking pg3200.txt +on-rushing pg3200.txt +on. pg31100.txt, pg3200.txt, pg100.txt +on." pg31100.txt, pg3200.txt +on.' pg3200.txt, pg100.txt +on.) pg31100.txt +on.- pg100.txt +on._"] pg31100.txt +on: pg31100.txt, pg3200.txt +on; pg31100.txt, pg3200.txt, pg100.txt +on;' pg3200.txt +on? pg3200.txt, pg100.txt +on?" pg31100.txt, pg3200.txt +on?' pg3200.txt +onanism." 
pg3200.txt +once pg31100.txt, pg3200.txt, pg100.txt +once! pg3200.txt, pg100.txt +once, pg31100.txt, pg3200.txt, pg100.txt +once- pg100.txt +once--" pg3200.txt +once--and pg3200.txt +once--but pg3200.txt +once--except pg3200.txt +once--i pg3200.txt +once--you'll pg3200.txt +once-flourishing pg3200.txt +once. pg31100.txt, pg3200.txt, pg100.txt +once." pg31100.txt, pg3200.txt +once.) pg3200.txt +once: pg3200.txt, pg100.txt +once; pg3200.txt, pg100.txt +once;--i pg31100.txt +once? pg31100.txt, pg100.txt +once?" pg3200.txt +one! pg31100.txt, pg3200.txt, pg100.txt +one!" pg31100.txt, pg3200.txt +one!). pg3200.txt +one"--and pg3200.txt +one's pg31100.txt, pg3200.txt +one's. pg3200.txt +one) pg31100.txt +one), pg3200.txt +one, pg31100.txt, pg3200.txt, pg100.txt +one," pg3200.txt +one- pg3200.txt, pg100.txt +one-- pg3200.txt +one--" pg3200.txt +one--and pg3200.txt +one--anjy." pg3200.txt +one--but pg3200.txt +one--certainly pg3200.txt +one--i pg3200.txt +one--men pg3200.txt +one--mr pg31100.txt +one--not pg3200.txt +one--the pg3200.txt +one--to pg31100.txt +one--two--three, pg3200.txt +one--what pg3200.txt +one-and-twenty pg31100.txt +one-and-twenty." pg31100.txt +one-armed pg3200.txt +one-eighth pg3200.txt +one-eyed pg3200.txt +one-fifteenth. pg3200.txt +one-fifth pg3200.txt +one-half pg3200.txt +one-horse pg3200.txt +one-hundredths pg3200.txt +one-sixth; pg3200.txt +one-tell. pg100.txt +one-third pg3200.txt +one-today. pg3200.txt +one. pg31100.txt, pg3200.txt, pg100.txt +one." pg31100.txt, pg3200.txt +one.' pg31100.txt, pg3200.txt +one.'" pg3200.txt +one.) pg31100.txt, pg3200.txt +one: pg3200.txt, pg100.txt +one:--he pg31100.txt +one; pg31100.txt, pg3200.txt, pg100.txt +one;--hundreds pg3200.txt +one? pg31100.txt, pg3200.txt, pg100.txt +one?" pg3200.txt +one?' pg3200.txt +one] pg100.txt +onereuse', pg3200.txt +ones pg31100.txt, pg3200.txt, pg100.txt +ones! pg3200.txt +ones!" pg3200.txt +ones, pg31100.txt, pg3200.txt, pg100.txt +ones--108 pg3200.txt +ones--a pg3200.txt +ones--not pg3200.txt +ones--um--" pg3200.txt +ones--un--' pg3200.txt +ones. pg31100.txt, pg3200.txt, pg100.txt +ones." pg31100.txt, pg3200.txt +ones.) pg3200.txt +ones: pg3200.txt, pg100.txt +ones; pg3200.txt, pg100.txt +ones? pg100.txt +ones?" pg3200.txt +oneself pg31100.txt +oneself!--poor pg31100.txt +oneyers, pg100.txt +ongles? pg100.txt +onion pg3200.txt +onion!" pg3200.txt +onion. pg3200.txt +onions pg3200.txt +online pg31100.txt, pg3200.txt, pg100.txt +onlooker pg3200.txt +onlookers; pg3200.txt +only! pg3200.txt, pg100.txt +only, pg31100.txt, pg3200.txt, pg100.txt +only-- pg31100.txt +only--" pg3200.txt +only--63. pg3200.txt +only--but pg3200.txt +only--i--" pg3200.txt +only. pg31100.txt, pg3200.txt, pg100.txt +only." pg31100.txt, pg3200.txt +only: pg3200.txt +only; pg31100.txt, pg3200.txt +only? pg3200.txt +onreasonable, pg3200.txt +onrush pg3200.txt +onshore pg3200.txt +onslaught pg3200.txt +onteora, pg3200.txt +onto pg3200.txt +onward pg3200.txt +onward, pg3200.txt +onyx,' pg3200.txt +oohipara pg3200.txt +oolitic pg3200.txt +oos, pg3200.txt +ooze pg3200.txt, pg100.txt +oozes pg100.txt +oozing pg3200.txt +opal. pg100.txt +opals pg3200.txt +opaque?" pg3200.txt +ope pg100.txt +ope, pg100.txt +open pg31100.txt, pg3200.txt, pg100.txt +open'd, pg100.txt +open'd. pg100.txt +open't. pg100.txt +open, pg31100.txt, pg3200.txt, pg100.txt +open--" pg3200.txt +open-air pg3200.txt +open. pg31100.txt, pg3200.txt, pg100.txt +open." pg31100.txt, pg3200.txt +open: pg100.txt +open; pg3200.txt, pg100.txt +open? pg100.txt +open?" 
pg3200.txt +opened pg31100.txt, pg3200.txt +opened, pg31100.txt, pg3200.txt +opened. pg3200.txt +opened." pg31100.txt +opened: pg31100.txt +opened? pg3200.txt +opening pg31100.txt, pg3200.txt +opening. pg31100.txt, pg3200.txt +opening." pg3200.txt +opening: pg31100.txt +opening?" pg3200.txt +openings pg31100.txt, pg3200.txt +openings, pg31100.txt +openly pg31100.txt, pg100.txt +openly, pg100.txt +openly. pg100.txt +openly." pg31100.txt +openly; pg31100.txt +openly? pg100.txt +openness pg31100.txt +openness, pg31100.txt +opens pg3200.txt +opens, pg3200.txt +opera pg3200.txt +opera, pg3200.txt +opera--they pg3200.txt +opera-girl--i pg31100.txt +opera-glass. pg3200.txt +opera-house pg3200.txt +opera." pg3200.txt +opera; pg3200.txt +operas, pg3200.txt +operas. pg3200.txt +operate pg31100.txt, pg3200.txt, pg100.txt +operate! pg31100.txt +operatic pg3200.txt +operating pg31100.txt +operating, pg3200.txt +operation pg31100.txt, pg3200.txt +operation, pg3200.txt +operation. pg3200.txt +operations pg3200.txt +operations. pg3200.txt +operations." pg3200.txt +operatives, pg3200.txt +operator pg3200.txt +operator, pg3200.txt +ophelia pg100.txt +ophelia! pg100.txt +ophelia, pg100.txt +ophelia,'- pg100.txt +ophelia- pg100.txt +ophelia. pg100.txt +ophelia? pg100.txt +ophir!---- pg3200.txt +ophir. pg3200.txt +ophir." pg3200.txt +opinion pg31100.txt, pg3200.txt, pg100.txt +opinion! pg100.txt +opinion'd. pg100.txt +opinion, pg31100.txt, pg3200.txt, pg100.txt +opinion--" pg3200.txt +opinion--but pg3200.txt +opinion--in pg31100.txt +opinion. pg31100.txt, pg3200.txt, pg100.txt +opinion." pg31100.txt, pg3200.txt +opinion.--miss pg31100.txt +opinion; pg31100.txt +opinion? pg3200.txt, pg100.txt +opinion?" pg31100.txt, pg3200.txt +opinion?--pray pg31100.txt +opinionated. pg3200.txt +opinions pg31100.txt, pg3200.txt, pg100.txt +opinions, pg31100.txt, pg3200.txt, pg100.txt +opinions--i pg31100.txt +opinions. pg31100.txt, pg3200.txt +opinions." pg31100.txt +opinions; pg100.txt +opinions? pg3200.txt +opium, pg3200.txt +opium-smoking pg3200.txt +opoponax, pg3200.txt +opossum, pg3200.txt +oppenau pg3200.txt +oppenau, pg3200.txt +opponent pg3200.txt +opponents' pg3200.txt +opportunities pg31100.txt, pg3200.txt +opportunities, pg31100.txt +opportunities--but pg3200.txt +opportunities. pg3200.txt +opportunities; pg3200.txt +opportunity pg31100.txt, pg3200.txt, pg100.txt +opportunity, pg3200.txt, pg100.txt +opportunity-- pg3200.txt +opportunity. pg31100.txt, pg3200.txt, pg100.txt +opportunity." pg31100.txt, pg3200.txt +opportunity; pg3200.txt +oppos'd pg100.txt +oppos'd, pg100.txt +oppos'd. pg100.txt +oppose pg31100.txt, pg100.txt +oppose, pg31100.txt, pg3200.txt +oppose. pg100.txt +opposed pg31100.txt, pg3200.txt, pg100.txt +opposed. pg31100.txt, pg3200.txt +opposer. pg100.txt +opposing pg31100.txt, pg3200.txt +opposite pg31100.txt, pg3200.txt, pg100.txt +opposite. pg31100.txt, pg100.txt +opposites. pg100.txt +opposition pg31100.txt, pg3200.txt, pg100.txt +opposition, pg31100.txt, pg3200.txt +opposition. pg31100.txt, pg3200.txt +opposition.] pg3200.txt +opposition; pg3200.txt +oppress pg3200.txt +oppress'd pg100.txt +oppress'd, pg100.txt +oppress, pg100.txt +oppressed pg31100.txt, pg3200.txt +oppressed. pg31100.txt, pg100.txt +oppression pg31100.txt, pg3200.txt, pg100.txt +oppression, pg3200.txt +oppression. pg100.txt +oppressive pg3200.txt +oppressive. pg31100.txt, pg3200.txt +oppressor. pg3200.txt +opprest: pg100.txt +opprobriously? pg100.txt +optics?" pg3200.txt +optimism pg3200.txt +optimistic pg3200.txt +option. 
pg3200.txt +opulence pg3200.txt +opulence, pg3200.txt +opulency. pg100.txt +opulent pg3200.txt +or, pg31100.txt, pg3200.txt, pg100.txt +or--" pg3200.txt +or--' pg3200.txt +or-it pg100.txt +or: pg3200.txt +oracle pg3200.txt, pg100.txt +oracle, pg3200.txt, pg100.txt +oracle- pg100.txt +oracle. pg100.txt +oracle: pg100.txt +oracles, pg100.txt +oracles?"--edition pg3200.txt +oracular pg3200.txt +orange pg3200.txt +orange, pg3200.txt +orange-trees pg3200.txt +orange-wife pg100.txt +oranges pg3200.txt +oranges, pg3200.txt +oranges?" pg3200.txt +oration pg3200.txt +oration, pg3200.txt +oration--enterprise. pg3200.txt +oration. pg3200.txt +oration.' pg100.txt +oration; pg100.txt +orations pg3200.txt +orator pg3200.txt, pg100.txt +orator, pg3200.txt, pg100.txt +orator. pg100.txt +orator." pg31100.txt +orator: pg3200.txt +orator; pg100.txt +orators, pg3200.txt, pg100.txt +orators. pg100.txt +oratory pg3200.txt, pg100.txt +oratory, pg3200.txt, pg100.txt +oratory. pg3200.txt +oratory: pg3200.txt +orb pg100.txt +orb, pg100.txt +orb." pg3200.txt +orbit pg3200.txt +orbit; pg3200.txt +orbits, pg3200.txt +orbs pg100.txt +orbs, pg100.txt +orbs?" pg3200.txt +orchard pg31100.txt, pg3200.txt, pg100.txt +orchard, pg3200.txt, pg100.txt +orchard. pg31100.txt, pg100.txt +orchards pg3200.txt +orchards, pg100.txt +orchestra pg3200.txt +orchestra. pg3200.txt +ordain'd! pg100.txt +ordain'd. pg100.txt +ordained pg3200.txt +ordained; pg3200.txt +ordeal pg3200.txt +ordeal, pg3200.txt +ordeal--committee pg3200.txt +ordeal. pg3200.txt +order pg31100.txt, pg3200.txt, pg100.txt +order! pg3200.txt +order!" pg3200.txt +order!' pg3200.txt +order, pg31100.txt, pg3200.txt, pg100.txt +order-- pg3200.txt +order--and pg3200.txt +order-book pg3200.txt +order. pg31100.txt, pg3200.txt, pg100.txt +order." pg3200.txt +order: pg3200.txt +order; pg3200.txt, pg100.txt +order? pg3200.txt, pg100.txt +order?' pg3200.txt +ordered pg31100.txt, pg3200.txt +ordered, pg31100.txt, pg3200.txt +ordered--" pg3200.txt +ordered. pg3200.txt +ordered: pg100.txt +ordered; pg3200.txt, pg100.txt +ordering pg3200.txt +ordering; pg100.txt +orderless, pg100.txt +orderly pg3200.txt +orderly. pg3200.txt, pg100.txt +orders pg31100.txt, pg3200.txt +orders! pg3200.txt +orders!" pg3200.txt +orders, pg31100.txt, pg3200.txt +orders--here pg3200.txt +orders--we pg3200.txt +orders. pg31100.txt, pg3200.txt +orders." pg31100.txt, pg3200.txt +orders: pg3200.txt +orders;' pg3200.txt +orders?" pg3200.txt +ordinance pg3200.txt, pg100.txt +ordinance, pg100.txt +ordinance. pg100.txt +ordinant. pg100.txt +ordinarily pg3200.txt +ordinarily, pg3200.txt +ordinarily-impossible pg3200.txt +ordinary pg31100.txt, pg3200.txt, pg100.txt +ordinary. pg31100.txt, pg100.txt +ordinary; pg3200.txt +ordination." pg31100.txt +ordination; pg31100.txt +ordnance pg100.txt +ordnance. pg3200.txt +ore--splendid pg3200.txt +ore. pg3200.txt +ore; pg3200.txt +oregon pg3200.txt +oreille!" pg3200.txt +oreille?" pg3200.txt +ores pg3200.txt +ores. pg3200.txt +ores? pg3200.txt +orful pg3200.txt +organ pg3200.txt +organ, pg3200.txt +organ-grinder, pg3200.txt +organ-grinders. pg3200.txt +organ-grinding, pg3200.txt +organ-pipes pg3200.txt +organ. pg100.txt +organic pg3200.txt +organised pg3200.txt +organised. pg3200.txt +organization pg3200.txt +organization, pg3200.txt +organization. pg3200.txt +organize pg3200.txt +organize; pg3200.txt +organized pg3200.txt +organized--and pg3200.txt +organizer pg3200.txt +organs pg3200.txt, pg100.txt +organum. pg3200.txt +orgies pg3200.txt +orgies. pg3200.txt +orgies." 
pg3200.txt +orgies?" pg3200.txt +orgy pg3200.txt +orgy. pg3200.txt +orient pg3200.txt +orient--" pg3200.txt +oriental pg3200.txt +orifice. pg3200.txt +orifices pg3200.txt +origin pg31100.txt, pg3200.txt, pg100.txt +origin, pg3200.txt +origin,- pg100.txt +origin--" pg3200.txt +origin. pg3200.txt +origin." pg3200.txt +origin; pg31100.txt +original pg31100.txt, pg3200.txt +original" pg3200.txt +original, pg3200.txt +original. pg3200.txt, pg100.txt +original." pg3200.txt +original; pg3200.txt +originality pg3200.txt +originality. pg3200.txt +originally pg31100.txt, pg3200.txt +originally, pg3200.txt +originals pg3200.txt +originals, pg3200.txt +originals--which pg3200.txt +originals. pg3200.txt +originate pg3200.txt +originated pg31100.txt, pg3200.txt +originated. pg3200.txt +originator pg3200.txt +orion pg3200.txt +orion! pg3200.txt +orion's. pg3200.txt +orion, pg3200.txt +orion. pg3200.txt +orisons pg3200.txt, pg100.txt +orisons, pg100.txt +orlando pg100.txt +orlando! pg100.txt +orlando'- pg100.txt +orlando, pg100.txt +orlando. pg100.txt +orlando? pg100.txt +orleans pg3200.txt, pg100.txt +orleans! pg3200.txt +orleans!" pg3200.txt +orleans, pg3200.txt, pg100.txt +orleans--and pg3200.txt +orleans--had pg3200.txt +orleans--jackson's pg3200.txt +orleans. pg3200.txt, pg100.txt +orleans." pg3200.txt +orleans: pg100.txt +orleans; pg100.txt +orleans?' pg3200.txt +orme pg3200.txt +ormond, pg31100.txt +ornament pg31100.txt, pg3200.txt +ornament, pg100.txt +ornament- pg100.txt +ornament. pg3200.txt, pg100.txt +ornament? pg100.txt +ornamental pg3200.txt +ornamental, pg3200.txt +ornamental. pg3200.txt +ornamentation pg3200.txt +ornamentation, pg3200.txt +ornamentation. pg3200.txt +ornamentation? pg3200.txt +ornamented pg31100.txt, pg3200.txt +ornamenting pg3200.txt +ornaments pg31100.txt, pg3200.txt, pg100.txt +ornaments' pg3200.txt +ornaments, pg31100.txt, pg3200.txt, pg100.txt +ornaments. pg100.txt +ornaments; pg100.txt +ornate pg3200.txt +ornery pg3200.txt +ornery. pg3200.txt +ornithologer pg3200.txt +ornithorhynchus pg3200.txt +ornithorhynchus, pg3200.txt +ornithorhynchus--grotesquest pg3200.txt +orodes, pg100.txt +orphan pg3200.txt +orphan. pg3200.txt +orphans pg3200.txt +orphans, pg3200.txt +orphans," pg3200.txt +orphans. pg3200.txt +orphans? pg3200.txt +orrin pg3200.txt +orsino's pg100.txt +orsino's. pg100.txt +orsino. pg100.txt +orthodox pg3200.txt +orthodox, pg3200.txt +orthodoxy pg3200.txt +orthodoxy, pg3200.txt +orthography, pg3200.txt +ortons, pg3200.txt +osborn pg3200.txt +osgood pg3200.txt +osgood's pg3200.txt +osgood, pg3200.txt +osgood. pg3200.txt +osric, pg100.txt +osric. pg100.txt +ossip. pg3200.txt +ostensible pg31100.txt, pg3200.txt +ostensible. pg31100.txt +ostensibly pg3200.txt +ostent pg100.txt +ostent, pg100.txt +ostentation pg31100.txt +ostentation, pg3200.txt, pg100.txt +ostentation,- pg100.txt +ostentation. pg3200.txt, pg100.txt +ostentatious pg31100.txt, pg3200.txt +ostentatious. pg3200.txt +osteopathist. pg3200.txt +osteopathists. pg3200.txt +osteopathy pg3200.txt +osteopathy. pg3200.txt +ostler pg31100.txt, pg100.txt +ostler! pg100.txt +ostler? pg100.txt +ostracized pg3200.txt +ostrich pg3200.txt, pg100.txt +oswald pg3200.txt +oswald.] pg100.txt +oswald? pg100.txt +oswald]. pg100.txt +otard pg3200.txt +othello pg100.txt +othello! pg100.txt +othello, pg100.txt +othello. pg100.txt +othello? pg100.txt +other! 
pg31100.txt, pg100.txt +other's pg31100.txt, pg3200.txt +other, pg31100.txt, pg3200.txt, pg100.txt +other- pg100.txt +other--" pg3200.txt +other--and pg3200.txt +other--composites. pg3200.txt +other--hesitated pg3200.txt +other--no pg3200.txt +other--so, pg3200.txt +other--that pg31100.txt +other--there's pg3200.txt +other--what pg31100.txt +other. pg31100.txt, pg3200.txt, pg100.txt +other." pg31100.txt, pg3200.txt +other.' pg3200.txt +other: pg31100.txt, pg3200.txt +other; pg31100.txt, pg3200.txt, pg100.txt +other? pg31100.txt, pg3200.txt +other?" pg31100.txt, pg3200.txt +other?' pg3200.txt +other?--how pg31100.txt +others pg31100.txt, pg3200.txt, pg100.txt +others!" pg31100.txt, pg3200.txt +others); pg3200.txt +others, pg31100.txt, pg3200.txt, pg100.txt +others--his pg3200.txt +others--of pg3200.txt +others--she pg31100.txt +others--they pg3200.txt +others--this pg3200.txt +others. pg31100.txt, pg3200.txt, pg100.txt +others." pg31100.txt, pg3200.txt +others.] pg100.txt +others: pg3200.txt +others; pg31100.txt, pg3200.txt, pg100.txt +others? pg31100.txt, pg3200.txt, pg100.txt +others?" pg31100.txt, pg3200.txt +others] pg100.txt +others]. pg100.txt +otherwhere; pg100.txt +otherwheres; pg3200.txt +otherwise pg31100.txt, pg3200.txt, pg100.txt +otherwise, pg31100.txt, pg3200.txt, pg100.txt +otherwise," pg3200.txt +otherwise-- pg3200.txt +otherwise. pg31100.txt, pg3200.txt, pg100.txt +otherwise." pg3200.txt +otherwise.? pg100.txt +otherwise: pg100.txt +otherwise; pg3200.txt +otherwise? pg3200.txt +otherwise?" pg31100.txt +otherwise?' pg3200.txt +otherwise?), pg100.txt +otho pg3200.txt +ottar pg3200.txt +ottawa. pg3200.txt +otter. pg100.txt +otter? pg100.txt +ottoman pg3200.txt +ottoman. pg100.txt +ottomites. pg100.txt +ottomites? pg100.txt +otway pg31100.txt +ouachita, pg3200.txt +ouchy-lausanne pg3200.txt +oudside, pg3200.txt +ouen, pg3200.txt +ouen. pg3200.txt +ought pg31100.txt, pg3200.txt, pg100.txt +ought! pg31100.txt +ought, pg31100.txt +ought--" pg3200.txt +ought. pg31100.txt +ought." pg31100.txt, pg3200.txt +ought; pg31100.txt +ought?" pg31100.txt +oughter pg3200.txt +oughtn't pg3200.txt +oukaparinga pg3200.txt +ounce pg3200.txt, pg100.txt +ounce, pg3200.txt +ounce. pg100.txt +ounces--" pg3200.txt +ounces. pg3200.txt +ouns!'" pg3200.txt +our--" pg3200.txt +ourn pg3200.txt +ourn, pg3200.txt +ours pg31100.txt, pg3200.txt, pg100.txt +ours! pg3200.txt, pg100.txt +ours!" pg3200.txt +ours, pg31100.txt, pg3200.txt, pg100.txt +ours- pg100.txt +ours-- pg3200.txt +ours. pg31100.txt, pg3200.txt, pg100.txt +ours." pg3200.txt +ours; pg3200.txt, pg100.txt +ours? pg100.txt +ourself pg100.txt +ourself, pg100.txt +ourself. pg100.txt +ourself; pg100.txt +ourselves pg31100.txt, pg3200.txt, pg100.txt +ourselves, pg31100.txt, pg3200.txt, pg100.txt +ourselves- pg100.txt +ourselves. pg31100.txt, pg3200.txt, pg100.txt +ourselves." pg31100.txt, pg3200.txt +ourselves.' pg3200.txt +ourselves: pg31100.txt +ourselves; pg3200.txt +ourselves? pg100.txt +ourselves?" pg31100.txt +out! pg3200.txt, pg100.txt +out!" pg3200.txt +out!' 
pg3200.txt +out!'] pg3200.txt +out!- pg100.txt +out!--or pg3200.txt +out" pg3200.txt +out"--and pg3200.txt +out"--not pg3200.txt +out, pg31100.txt, pg3200.txt, pg100.txt +out,-- pg31100.txt, pg3200.txt +out- pg3200.txt, pg100.txt +out-- pg31100.txt, pg3200.txt +out--" pg3200.txt +out--and pg3200.txt +out--do pg31100.txt +out--i pg31100.txt +out--there's pg3200.txt +out--to pg3200.txt +out-and-out pg3200.txt +out-breath'd, pg100.txt +out-churchill'd pg31100.txt +out-door pg3200.txt +out-doors. pg3200.txt +out-talk pg3200.txt +out-vied. pg100.txt +out. pg31100.txt, pg3200.txt, pg100.txt +out." pg31100.txt, pg3200.txt +out."] pg3200.txt +out.' pg3200.txt, pg100.txt +out.'" pg3200.txt +out.] pg3200.txt, pg100.txt +out: pg31100.txt, pg3200.txt, pg100.txt +out:] pg3200.txt +out; pg31100.txt, pg3200.txt, pg100.txt +out? pg3200.txt, pg100.txt +out?" pg31100.txt, pg3200.txt +out?' pg3200.txt +out?) pg3200.txt +out] pg100.txt +outbreak pg3200.txt +outburst pg3200.txt +outbursts pg3200.txt +outcast pg3200.txt +outcast. pg3200.txt +outcast." pg3200.txt +outcast: pg3200.txt +outcasts pg3200.txt +outcasts. pg3200.txt +outcome pg3200.txt +outcome! pg3200.txt +outcroppings, pg3200.txt +outcry." pg3200.txt +outdare pg100.txt +outdone pg31100.txt, pg3200.txt +outen pg3200.txt +outer pg3200.txt +outfac'd pg100.txt +outface pg100.txt +outfit pg3200.txt +outfit, pg3200.txt +outfit. pg3200.txt +outfits; pg3200.txt +outfitted pg3200.txt +outgo pg3200.txt, pg100.txt +outgoes pg100.txt +outgoing pg3200.txt +outgrew. pg3200.txt +outgrew: pg3200.txt +outgrown pg31100.txt, pg3200.txt +outing pg3200.txt +outjest pg100.txt +outlandish, pg3200.txt +outlandishness, pg3200.txt +outlast pg3200.txt +outlaw pg3200.txt +outlawed. pg3200.txt +outlawry pg100.txt +outlaws pg100.txt +outlaws, pg3200.txt, pg100.txt +outlaws--and pg3200.txt +outlaws--fugitives pg3200.txt +outlaws. pg3200.txt +outlay pg3200.txt +outlay. pg3200.txt +outlet pg3200.txt +outlet. pg3200.txt +outlet?" pg3200.txt +outlets pg3200.txt +outliars. pg3200.txt +outline pg3200.txt +outline, pg3200.txt +outline. pg3200.txt +outlined pg3200.txt +outlines pg3200.txt +outlive pg3200.txt, pg100.txt +outlived pg3200.txt +outlook pg3200.txt +outlook, pg3200.txt +outlook. pg3200.txt +outlook; pg3200.txt +outlook?" pg3200.txt +outlying pg3200.txt +output pg3200.txt +outrage pg3200.txt, pg100.txt +outrage! pg100.txt +outrage!" pg3200.txt +outrage. pg3200.txt, pg100.txt +outraged pg3200.txt +outraged. pg3200.txt +outraged; pg3200.txt +outraged? pg3200.txt +outrageous pg3200.txt +outrageous, pg3200.txt +outrageously pg3200.txt +outrages pg100.txt +outrages. pg100.txt +outrages; pg100.txt +outram pg3200.txt +outram, pg3200.txt +outre." pg3200.txt +outrigger pg3200.txt +outright pg31100.txt +outright! pg100.txt +outright. pg100.txt +outroar pg100.txt +outrun pg31100.txt, pg3200.txt, pg100.txt +outs pg31100.txt +outscorn pg100.txt +outside pg3200.txt +outside, pg3200.txt, pg100.txt +outside. pg3200.txt +outside.' pg3200.txt +outside; pg3200.txt +outsider's. pg3200.txt +outsiders pg3200.txt +outsiders--none pg3200.txt +outsides, pg100.txt +outskirts pg3200.txt +outspeaks pg100.txt +outspoken. pg3200.txt +outspokenly pg3200.txt +outspread." pg3200.txt +outstretch'd pg100.txt +outward pg31100.txt, pg3200.txt, pg100.txt +outward! pg100.txt +outward, pg100.txt +outward--i pg3200.txt +outwardly pg3200.txt +outwardly, pg100.txt +outwardly. pg100.txt +outweigh pg3200.txt +outweigh, pg3200.txt +outworn, pg100.txt +oval, pg3200.txt +oval. 
pg3200.txt +ovation; pg3200.txt +oven--and pg3200.txt +oven-like pg3200.txt +oven. pg3200.txt +over pg31100.txt, pg3200.txt, pg100.txt +over! pg100.txt +over!" pg31100.txt, pg3200.txt +over!' pg3200.txt +over, pg31100.txt, pg3200.txt, pg100.txt +over," pg31100.txt +over--a pg3200.txt +over--everywhere." pg3200.txt +over--something pg3200.txt +over-anxious, pg31100.txt +over-blown; pg100.txt +over-careful pg31100.txt +over-described. pg3200.txt +over-estimating pg3200.txt +over-excitement. pg3200.txt +over-exposure. pg3200.txt +over-express pg3200.txt +over-guessing; pg3200.txt +over-happy. pg100.txt +over-haste pg3200.txt +over-long! pg100.txt +over-much. pg100.txt +over-night, pg100.txt +over-plus, pg100.txt +over-powered pg31100.txt +over-proud pg100.txt +over-ripe pg3200.txt +over-running. pg100.txt +over-scrupulous pg31100.txt +over-scrupulous? pg31100.txt +over-shot. pg100.txt +over-study pg3200.txt +over-supply pg3200.txt +over-tedious. pg100.txt +over-throw pg31100.txt +over-view? pg100.txt +over-warm pg3200.txt +over-work. pg3200.txt +over. pg31100.txt, pg3200.txt, pg100.txt +over." pg31100.txt, pg3200.txt +over.' pg3200.txt +over: pg3200.txt +over; pg31100.txt, pg3200.txt, pg100.txt +over? pg3200.txt +over?" pg3200.txt +over?' pg3200.txt +overalls pg3200.txt +overalls; pg3200.txt +overawe. pg100.txt +overawing pg3200.txt +overbearing pg31100.txt +overblown pg100.txt +overblown. pg100.txt +overboard pg3200.txt, pg100.txt +overboard!" pg3200.txt +overboard!' pg3200.txt +overboard, pg3200.txt, pg100.txt +overboard. pg3200.txt +overboard.) pg3200.txt +overborne pg3200.txt +overborne, pg100.txt +overborne. pg100.txt +overbuys pg100.txt +overcame pg31100.txt, pg3200.txt +overcame. pg100.txt +overcareful pg31100.txt +overclouded," pg3200.txt +overcoat pg3200.txt +overcoat, pg3200.txt +overcoats pg3200.txt +overcome pg31100.txt, pg3200.txt, pg100.txt +overcome, pg31100.txt, pg100.txt +overcome-- pg31100.txt +overcome. pg31100.txt, pg100.txt +overcome." pg31100.txt +overcomes] pg100.txt +overcoming pg31100.txt, pg3200.txt +overcrowding pg3200.txt +overdid pg3200.txt +overdo pg3200.txt +overdoing pg3200.txt +overdone pg3200.txt, pg100.txt +overdone's pg100.txt +overdone. pg3200.txt, pg100.txt +overdose. pg3200.txt +overdrawn pg3200.txt +overdrawn; pg31100.txt +overdriven pg3200.txt +overdue, pg3200.txt +overestimate pg3200.txt +overflow pg3200.txt +overflow. pg3200.txt +overflowed pg3200.txt +overflowing pg3200.txt +overflowing. pg3200.txt +overflowing; pg31100.txt +overflowings pg3200.txt +overgorg'd pg100.txt +overhanging pg3200.txt +overhanging. pg3200.txt +overhaul pg3200.txt +overhead pg3200.txt +overhead, pg3200.txt +overhead. pg3200.txt, pg100.txt +overhead; pg3200.txt +overhear pg31100.txt +overhear- pg100.txt +overheard pg31100.txt, pg3200.txt, pg100.txt +overheard. pg31100.txt +overjoyed pg31100.txt, pg3200.txt, pg100.txt +overjoyed--it pg3200.txt +overlaid pg3200.txt +overland pg3200.txt +overleather. pg100.txt +overloaded pg3200.txt +overlook pg31100.txt, pg3200.txt +overlook'd pg100.txt +overlook, pg31100.txt +overlooked pg31100.txt, pg3200.txt +overlooked, pg3200.txt +overlooked. pg31100.txt, pg3200.txt +overlooked." pg3200.txt +overlooking pg31100.txt, pg3200.txt +overlooks pg3200.txt +overlooks, pg100.txt +overpassed pg3200.txt +overplus pg3200.txt +overpower pg31100.txt +overpowered pg31100.txt +overpowering pg31100.txt, pg3200.txt +overpowering!" pg31100.txt +overpowering. pg31100.txt +overpowers pg31100.txt +overrated. pg3200.txt +overrul'd; pg100.txt +overrun. 
pg100.txt +overseer pg3200.txt +overset pg100.txt +overshadow pg3200.txt +overshadowed pg3200.txt +overshadowing pg3200.txt +overshoes." pg3200.txt +overshot. pg100.txt +oversight pg3200.txt +oversight, pg3200.txt +oversights. pg100.txt +overspread pg31100.txt, pg3200.txt +overspreading pg31100.txt +overstain'd pg100.txt +overstatement. pg3200.txt +overstrain pg3200.txt +overstrained! pg31100.txt +overswear; pg100.txt +overt pg3200.txt +overtake pg31100.txt, pg3200.txt +overtake? pg100.txt +overtaken pg3200.txt +overtaken. pg31100.txt +overthrow pg31100.txt, pg3200.txt, pg100.txt +overthrow. pg100.txt +overthrow; pg100.txt +overthrow? pg100.txt +overthrown pg100.txt +overthrown! pg100.txt +overthrown. pg100.txt +overthrown; pg100.txt +overthrows pg100.txt +overthwart--" pg3200.txt +overtook pg31100.txt, pg3200.txt +overture. pg100.txt +overture; pg31100.txt +overtures pg31100.txt +overturn pg3200.txt +overturn'd. pg100.txt +overturn, pg100.txt +overturned pg3200.txt +overturned." pg31100.txt +overturning pg3200.txt +overwatch'd. pg100.txt +overweigh pg100.txt +overwhelm pg3200.txt +overwhelmed pg3200.txt +overwhelmed, pg31100.txt +overworked. pg3200.txt +overworn, pg3200.txt +overworn. pg100.txt +overwrought pg3200.txt +ow'd pg100.txt +ow'd, pg100.txt +ow'st, pg100.txt +owe pg31100.txt, pg3200.txt, pg100.txt +owe, pg100.txt +owe. pg100.txt +owe: pg100.txt +owe; pg100.txt +owe? pg100.txt +owed pg31100.txt, pg3200.txt, pg100.txt +owed, pg100.txt +owed. pg100.txt +owen pg3200.txt, pg100.txt +owens, pg3200.txt +owes pg31100.txt, pg3200.txt, pg100.txt +owes, pg100.txt +owes--" pg31100.txt +owes. pg100.txt +owes; pg100.txt +owes? pg100.txt +owest, pg100.txt +owing pg31100.txt, pg3200.txt +owing. pg31100.txt +owl pg3200.txt, pg100.txt +owl- pg100.txt +owl. pg100.txt +owl: pg100.txt +owls pg3200.txt +own! pg3200.txt, pg100.txt +own!" pg3200.txt +own"--there pg3200.txt +own, pg31100.txt, pg3200.txt, pg100.txt +own- pg100.txt +own--have pg3200.txt +own--her pg31100.txt +own--shall pg3200.txt +own--there pg3200.txt +own--under pg3200.txt +own. pg31100.txt, pg3200.txt, pg100.txt +own." pg31100.txt, pg3200.txt +own.' pg100.txt +own; pg31100.txt, pg3200.txt, pg100.txt +own? pg31100.txt, pg3200.txt, pg100.txt +own?" pg31100.txt, pg3200.txt +own_. pg3200.txt +owned pg31100.txt, pg3200.txt +owner pg31100.txt, pg3200.txt, pg100.txt +owner's pg3200.txt +owner, pg31100.txt, pg3200.txt, pg100.txt +owner. pg100.txt +owner." pg3200.txt +owner; pg3200.txt +owners pg31100.txt, pg3200.txt +owners, pg100.txt +owners. pg3200.txt +owners." pg3200.txt +ownership! pg3200.txt +owning pg3200.txt +owns pg31100.txt, pg3200.txt +ox pg3200.txt +ox. pg100.txt +ox." pg31100.txt +oxen. pg3200.txt +oxford pg31100.txt, pg3200.txt, pg100.txt +oxford"; pg31100.txt +oxford, pg31100.txt, pg3200.txt, pg100.txt +oxford. pg3200.txt, pg100.txt +oxford." pg31100.txt +oxford; pg31100.txt +oxford? pg100.txt +oxford?"--was pg31100.txt +oxlips, pg100.txt +oxpressions!--" pg3200.txt +oyes pg100.txt +oyes. pg100.txt +oyster pg3200.txt, pg100.txt +oyster, pg3200.txt, pg100.txt +oyster-shells, pg3200.txt +oyster-wench; pg100.txt +oyster. pg3200.txt, pg100.txt +oysters pg3200.txt +oysters, pg31100.txt +oysters; pg3200.txt +ozana pg3200.txt +ozana. pg3200.txt +p'ans pg3200.txt +p'int." pg3200.txt +p'inted pg3200.txt +p'inting pg3200.txt +p'ints pg3200.txt +p'isoned pg3200.txt +p'raps. pg3200.txt +p's. pg100.txt +p--_op_-corn!" pg3200.txt +p. pg3200.txt +p.'s pg3200.txt +p.13. pg3200.txt +p.m. pg3200.txt +p.m., pg3200.txt +p.m.--got pg3200.txt +p.s. 
pg3200.txt +p324. pg3200.txt +pa pg3200.txt +pa! pg3200.txt +pa'm-tree pg3200.txt +pa's pg3200.txt +paar pg3200.txt +pabylon- pg100.txt +pace pg31100.txt, pg3200.txt, pg100.txt +pace, pg100.txt +pace. pg31100.txt, pg3200.txt +paced pg31100.txt, pg3200.txt +paces pg100.txt +pacific pg3200.txt +pacific, pg3200.txt +pacific. pg3200.txt +pacific: pg3200.txt +pacific; pg3200.txt +pacific? pg3200.txt +pacificator pg3200.txt +pacified. pg3200.txt, pg100.txt +pacing pg31100.txt, pg3200.txt +pack pg31100.txt, pg3200.txt, pg100.txt +pack! pg100.txt +pack'd, pg100.txt +pack'd; pg100.txt +pack-animals pg3200.txt +pack-horse pg3200.txt +pack-mules pg3200.txt +pack-thread. pg100.txt +pack; pg3200.txt +packages pg3200.txt +packard pg3200.txt +packed pg3200.txt +packed, pg3200.txt +packet pg3200.txt, pg100.txt +packet. pg3200.txt +packet; pg31100.txt +packets pg100.txt +packets. pg100.txt +packhorses, pg100.txt +packing pg31100.txt, pg3200.txt +packing, pg31100.txt +packing. pg3200.txt, pg100.txt +pad pg3200.txt +padded pg3200.txt +padded, pg3200.txt +padding pg3200.txt +paddle pg3200.txt +paddled pg3200.txt +paddlin' pg3200.txt +paddling pg3200.txt +paddling. pg3200.txt +paddock. pg31100.txt +padlocked, pg3200.txt +padlocks pg3200.txt +pads." pg3200.txt +padua pg100.txt +padua, pg3200.txt, pg100.txt +padua. pg100.txt +padua; pg100.txt +pagan pg3200.txt, pg100.txt +pagan!" pg3200.txt +pagan, pg3200.txt, pg100.txt +pagans, pg3200.txt +page pg3200.txt, pg100.txt +page! pg100.txt +page, pg3200.txt, pg100.txt +page. pg31100.txt, pg3200.txt, pg100.txt +page." pg3200.txt +page.] pg3200.txt, pg100.txt +page: pg3200.txt +page; pg3200.txt, pg100.txt +page? pg100.txt +pageant pg3200.txt, pg100.txt +pageant, pg3200.txt, pg100.txt +pageant--during pg3200.txt +pageant. pg3200.txt, pg100.txt +pageants pg3200.txt +pageants. pg100.txt +pages pg31100.txt, pg3200.txt, pg100.txt +pages, pg3200.txt +pages. pg3200.txt, pg100.txt +pah! pg3200.txt, pg100.txt +pah!" pg3200.txt +pah!"--and pg3200.txt +paid pg31100.txt, pg3200.txt, pg100.txt +paid, pg31100.txt, pg100.txt +paid. pg3200.txt, pg100.txt +paid.- pg100.txt +paid; pg100.txt +paige pg3200.txt +paige) pg3200.txt +paige, pg3200.txt +pail pg3200.txt +pail, pg3200.txt, pg100.txt +pails pg3200.txt +pain pg31100.txt, pg3200.txt, pg100.txt +pain! pg3200.txt +pain"--meaning pg3200.txt +pain'd pg100.txt +pain, pg31100.txt, pg3200.txt, pg100.txt +pain-killer pg3200.txt +pain-killer. pg3200.txt +pain. pg31100.txt, pg3200.txt, pg100.txt +pain." pg3200.txt +pain.' pg3200.txt +pain.'' pg3200.txt +pain.--he pg31100.txt +pain: pg3200.txt +pain; pg31100.txt, pg100.txt +pain? pg3200.txt, pg100.txt +pain?" pg31100.txt, pg3200.txt +pain?' pg3200.txt +paine pg3200.txt +paine, pg3200.txt +paine. pg3200.txt +paine; pg3200.txt +pained pg31100.txt, pg3200.txt +pained, pg3200.txt +painful pg31100.txt, pg3200.txt +painful, pg31100.txt, pg3200.txt, pg100.txt +painful. pg31100.txt, pg3200.txt +painfully pg3200.txt +painfully, pg3200.txt +paining pg3200.txt +painkiller--" pg3200.txt +painless pg3200.txt +pains pg31100.txt, pg3200.txt, pg100.txt +pains! pg100.txt +pains, pg3200.txt, pg100.txt +pains. pg3200.txt, pg100.txt +pains: pg31100.txt +pains; pg100.txt +pains? pg100.txt +painstaking pg3200.txt +painstakingly pg3200.txt +paint pg31100.txt, pg3200.txt, pg100.txt +paint, pg3200.txt +paint. pg3200.txt +paint." pg3200.txt +paint?" pg3200.txt +painted pg31100.txt, pg3200.txt, pg100.txt +painted, pg3200.txt, pg100.txt +painted; pg3200.txt +painter pg3200.txt, pg100.txt +painter, pg3200.txt +painter. 
pg3200.txt +painter; pg3200.txt +painters pg31100.txt, pg3200.txt +painters--the pg3200.txt +painting pg3200.txt, pg100.txt +painting, pg100.txt +painting. pg3200.txt +paintings pg3200.txt +paintings; pg31100.txt +paints pg3200.txt +pair pg31100.txt, pg3200.txt, pg100.txt +pair'd pg100.txt +pair, pg3200.txt +pair. pg3200.txt +pair?" pg3200.txt +paired, pg100.txt +pairfaitemaw." pg3200.txt +pairs pg3200.txt +pajock. pg100.txt +pal pg3200.txt +pal'd pg100.txt +palace pg3200.txt, pg100.txt +palace! pg100.txt +palace's pg3200.txt +palace, pg3200.txt, pg100.txt +palace--from pg3200.txt +palace-shops pg3200.txt +palace. pg3200.txt, pg100.txt +palace." pg3200.txt +palace.] pg100.txt +palace; pg3200.txt, pg100.txt +palaces pg3200.txt +palaces! pg100.txt +palaces, pg3200.txt, pg100.txt +palaces,'--terms pg3200.txt +palaces. pg3200.txt +palaces; pg3200.txt +paladin pg3200.txt +paladin's pg3200.txt +paladin, pg3200.txt +paladin. pg3200.txt +paladin." pg3200.txt +paladin?" pg3200.txt +palamedes pg100.txt +palanquins." pg31100.txt +palate pg100.txt +palate, pg100.txt +palate- pg100.txt +palate. pg3200.txt +palates pg100.txt +palatial pg3200.txt +palatial! pg3200.txt +palatine. pg100.txt +palaver pg3200.txt +pale pg31100.txt, pg3200.txt, pg100.txt +pale, pg31100.txt, pg3200.txt, pg100.txt +pale. pg31100.txt, pg3200.txt, pg100.txt +pale." pg31100.txt +pale; pg3200.txt +pale? pg100.txt +paled, pg3200.txt +paleness pg3200.txt +palestine pg3200.txt +palestine, pg3200.txt, pg100.txt +palestine. pg3200.txt +palestine." pg3200.txt +palette. pg3200.txt +palimpsest. pg3200.txt +paling pg3200.txt +palisades, pg31100.txt +pall pg3200.txt +pall, pg3200.txt +pall. pg3200.txt +pallas. pg100.txt +palled pg3200.txt +pallet pg3200.txt +palliative, pg3200.txt +pallid pg3200.txt +pallidly pg3200.txt +pallor pg3200.txt +palm pg31100.txt, pg3200.txt +palm, pg100.txt +palm-tree. pg100.txt +palm. pg3200.txt +palm." pg3200.txt +palm; pg3200.txt +palm? pg100.txt +palmer pg31100.txt +palmer!" pg31100.txt +palmer's, pg31100.txt +palmer, pg31100.txt +palmer--"then pg31100.txt +palmer. pg31100.txt +palmer; pg31100.txt +palmer;--"not pg31100.txt +palmer?" pg31100.txt +palmers, pg31100.txt +palmetto pg3200.txt +palmistry, pg3200.txt +palms pg3200.txt +palms, pg3200.txt +palmy pg3200.txt +palmyra. pg3200.txt +palmyra.' pg3200.txt +palpable pg100.txt +palpable, pg3200.txt +palpable. pg3200.txt +palpitates pg3200.txt +pals pg3200.txt +palsied pg100.txt +palsy pg3200.txt +palt'ring pg100.txt +palter pg3200.txt +palter. pg100.txt +paltering pg3200.txt +paltry pg3200.txt +pamela pg3200.txt +pamela, pg3200.txt +pamela. pg3200.txt +pampered, pg3200.txt +pamphlet pg3200.txt +pamphlet, pg3200.txt +pamphlet. pg3200.txt +pamphlet.) pg3200.txt +pamphlets pg31100.txt +pan pg3200.txt +pan!" pg3200.txt +pan, pg3200.txt +pan. pg3200.txt +pan." pg3200.txt +panacea. pg3200.txt +pancackes pg100.txt +pandar pg100.txt +pandar, pg100.txt +pandar; pg100.txt +pandars; pg100.txt +pandarus pg100.txt +pandarus! pg100.txt +pandarus- pg100.txt +pandarus. pg100.txt +pandemonium pg3200.txt +pander, pg100.txt +pander. pg100.txt +panderly pg100.txt +pandulph pg100.txt +panegyrics. pg3200.txt +panels, pg3200.txt +panes pg3200.txt +panes. pg3200.txt +pang pg31100.txt, pg3200.txt, pg100.txt +pang, pg3200.txt +pang. pg3200.txt +pang." pg3200.txt +pang; pg31100.txt +panging pg100.txt +pangs pg100.txt +pangs. pg3200.txt, pg100.txt +panic pg3200.txt +panic, pg3200.txt +panic--for pg3200.txt +panic. pg3200.txt +panicky. 
pg3200.txt +panico pg3200.txt +panics pg3200.txt +panics, pg3200.txt +panned pg3200.txt +pannemaker, pg3200.txt +panniers pg3200.txt +panoplied pg3200.txt +panoply pg3200.txt +panoply. pg3200.txt +panorama pg3200.txt +panorama, pg3200.txt +panorama. pg3200.txt +panorama?' pg3200.txt +panoramist pg3200.txt +pans pg3200.txt +pans; pg3200.txt +pant pg100.txt +pantaloon, pg100.txt +pantaloon. pg100.txt +pantaloons pg3200.txt +pantaloons, pg3200.txt +pantaloons. pg3200.txt +panted pg3200.txt +panted, pg3200.txt, pg100.txt +pantheists, pg3200.txt +pantheon, pg3200.txt +panthino pg100.txt +panting pg31100.txt, pg3200.txt +panting, pg3200.txt +panting. pg3200.txt, pg100.txt +pantograph pg3200.txt +pantograph, pg3200.txt +pantomime pg3200.txt +pantries pg31100.txt +pantry, pg100.txt +pantry.' pg3200.txt +pants pg3200.txt, pg100.txt +pap pg3200.txt +pap's pg3200.txt +pap, pg3200.txt, pg100.txt +pap--" pg3200.txt +pap--his pg3200.txt +papa pg31100.txt, pg3200.txt +papa! pg3200.txt +papa, pg3200.txt +papa. pg31100.txt, pg3200.txt +papa." pg3200.txt +papa?" pg31100.txt +papacy, pg3200.txt +papal pg3200.txt +papas, pg31100.txt +paper pg31100.txt, pg3200.txt, pg100.txt +paper!' pg3200.txt +paper"; pg3200.txt +paper's pg3200.txt +paper, pg31100.txt, pg3200.txt, pg100.txt +paper," pg3200.txt +paper- pg100.txt +paper--left pg3200.txt +paper--pay pg3200.txt +paper--thus: pg3200.txt +paper--tilbury pg3200.txt +paper-cutter, pg3200.txt +paper-knife pg3200.txt +paper-making, pg3200.txt +paper-weight, pg3200.txt +paper. pg31100.txt, pg3200.txt, pg100.txt +paper." pg31100.txt, pg3200.txt +paper.] pg3200.txt, pg100.txt +paper: pg3200.txt +paper; pg31100.txt, pg3200.txt, pg100.txt +paper? pg100.txt +paper] pg100.txt +papers pg31100.txt, pg3200.txt +papers, pg3200.txt +papers--and pg3200.txt +papers. pg3200.txt, pg100.txt +papers." pg3200.txt +papers?--have pg3200.txt +papers] pg100.txt +pappy." pg3200.txt +paps pg100.txt +par pg3200.txt +par--real pg3200.txt +par-am. pg3200.txt +par-dessus pg3200.txt +par. pg3200.txt +para pg3200.txt +parable pg3200.txt +parable. pg3200.txt, pg100.txt +paracelsus. pg100.txt +parachute, pg3200.txt +parachute? pg3200.txt +parade pg31100.txt, pg3200.txt +parade, pg3200.txt +parades, pg3200.txt +parading pg3200.txt +paradise pg3200.txt, pg100.txt +paradise, pg3200.txt, pg100.txt +paradise," pg3200.txt +paradise--partly pg3200.txt +paradise. pg3200.txt, pg100.txt +paradise." pg3200.txt +paradise; pg3200.txt +paradise?' pg100.txt +paradox, pg100.txt +paradoxes. pg100.txt +paragon pg100.txt +paragon. pg100.txt +paragraph pg31100.txt, pg3200.txt, pg100.txt +paragraph, pg3200.txt +paragraph. pg3200.txt +paragraph." pg3200.txt +paragraph: pg3200.txt +paragraph; pg3200.txt +paragraphs pg3200.txt +paragraphs, pg3200.txt +paragraphs. pg3200.txt +paragraphs." pg3200.txt +paragraphs.--m. pg3200.txt +parah, pg3200.txt +parallel pg3200.txt, pg100.txt +parallel!" pg3200.txt +parallel'd pg100.txt +parallel, pg100.txt +parallel. pg100.txt +parallels pg3200.txt +paralysed pg3200.txt +paralysed. pg3200.txt +paralysis pg3200.txt +paralysis; pg3200.txt +paralyzation pg3200.txt +paralyzed, pg3200.txt +paralyzed; pg3200.txt +paralyzing pg3200.txt +paramour pg100.txt +paramour, pg3200.txt +paramour. pg100.txt +paramour? pg100.txt +paramours, pg100.txt +parapets, pg100.txt +paraphernalia pg3200.txt +parasites pg3200.txt +parasites, pg100.txt +parasol pg3200.txt +parasol, pg31100.txt +parasol. pg3200.txt +parasols pg3200.txt +parawirra pg3200.txt +parcel pg31100.txt, pg3200.txt, pg100.txt +parcel. 
pg31100.txt +parcels pg31100.txt, pg3200.txt, pg100.txt +parcels, pg31100.txt +parcels." pg31100.txt +parched; pg3200.txt +parching pg3200.txt +parchment pg3200.txt +parchment, pg3200.txt +parchment. pg3200.txt +parchment? pg100.txt +pard, pg3200.txt, pg100.txt +pard." pg3200.txt +pard?" pg3200.txt +pardon pg31100.txt, pg3200.txt, pg100.txt +pardon! pg100.txt +pardon!- pg100.txt +pardon, pg31100.txt, pg3200.txt, pg100.txt +pardon. pg31100.txt, pg3200.txt, pg100.txt +pardon." pg31100.txt, pg3200.txt +pardon; pg31100.txt, pg3200.txt, pg100.txt +pardon? pg100.txt +pardon?" pg3200.txt +pardonable." pg31100.txt +pardoned pg3200.txt +pardoned--pull pg3200.txt +pardoned. pg31100.txt, pg100.txt +pardonnez-moi! pg100.txt +pardons pg3200.txt +pardons, pg3200.txt, pg100.txt +pardons. pg100.txt +pare pg100.txt +paregoric. pg3200.txt +parent!" pg31100.txt +parent, pg31100.txt +parent. pg3200.txt +parentage pg3200.txt +parentage, pg3200.txt, pg100.txt +parentage. pg3200.txt +parentage; pg100.txt +parentage? pg100.txt +parentage?' pg100.txt +parental pg31100.txt, pg3200.txt +parenthesis pg3200.txt +parenthesis, pg3200.txt +parenthesis. pg3200.txt +parenthetic pg3200.txt +parents pg31100.txt, pg3200.txt +parents, pg31100.txt, pg3200.txt, pg100.txt +parents. pg31100.txt, pg3200.txt +parents?" pg3200.txt +parents?' pg3200.txt +pari. pg3200.txt +pariah, pg3200.txt +parier pg3200.txt +paring pg3200.txt +paring-knife? pg100.txt +parings. pg100.txt +paris pg3200.txt, pg100.txt +paris! pg3200.txt, pg100.txt +paris!" pg3200.txt +paris). pg3200.txt +paris, pg3200.txt, pg100.txt +paris----" pg3200.txt +paris--two pg3200.txt +paris-made pg3200.txt +paris-ward. pg100.txt +paris. pg3200.txt, pg100.txt +paris." pg3200.txt +paris: pg3200.txt, pg100.txt +paris; pg3200.txt, pg100.txt +paris? pg100.txt +paris]. pg100.txt +parish pg31100.txt, pg3200.txt, pg100.txt +parish, pg31100.txt, pg3200.txt +parish. pg31100.txt, pg3200.txt, pg100.txt +parish; pg31100.txt +parishes pg31100.txt +park pg31100.txt, pg3200.txt, pg100.txt +park, pg31100.txt, pg3200.txt, pg100.txt +park-ward; pg100.txt +park. pg31100.txt, pg3200.txt, pg100.txt +park." pg31100.txt +park.]--from pg3200.txt +park; pg31100.txt, pg100.txt +park? pg3200.txt +parker pg3200.txt +parker's pg3200.txt +parker's. pg3200.txt +parker. pg31100.txt +parkhurst, pg3200.txt +parkin), pg3200.txt +parklands, pg31100.txt +parklands. pg31100.txt +parkman-- pg3200.txt +parkman. pg3200.txt +parlance.'" pg3200.txt +parle pg3200.txt +parle, pg100.txt +parle- pg100.txt +parle. pg100.txt +parle; pg100.txt +parley pg31100.txt, pg100.txt +parley. pg100.txt +parley] pg100.txt +parleying, pg3200.txt +parliament pg3200.txt, pg100.txt +parliament, pg31100.txt, pg3200.txt, pg100.txt +parliament--toole pg3200.txt +parliament. pg3200.txt, pg100.txt +parliament; pg100.txt +parliament? pg100.txt +parliamentary pg3200.txt +parliaments pg3200.txt +parlor pg3200.txt +parlor, pg3200.txt +parlor. pg3200.txt +parlor; pg3200.txt +parlor? pg3200.txt +parlors pg3200.txt +parlors?" pg3200.txt +parlour pg31100.txt +parlour, pg31100.txt +parlour-boarder pg31100.txt +parlour-car pg3200.txt +parlour. pg31100.txt, pg3200.txt, pg100.txt +parlours, pg31100.txt +parma pg3200.txt +parole." pg3200.txt +parolles pg100.txt +parolles! pg100.txt +parolles, pg100.txt +parolles. pg100.txt +parolles.' pg100.txt +parolles] pg100.txt +parowtee. pg3200.txt +parquette, pg3200.txt +parquette. pg3200.txt +parramatta pg3200.txt +parrot pg3200.txt, pg100.txt +parrot, pg3200.txt +parrot-teacher. pg100.txt +parrot. 
pg3200.txt, pg100.txt +parrot; pg3200.txt +parrots pg3200.txt +parrots, pg3200.txt +parry pg3200.txt +parry. pg31100.txt +parrys pg31100.txt +parsees pg3200.txt +parsley pg3200.txt +parsloe. pg3200.txt +parson pg31100.txt, pg3200.txt, pg100.txt +parson. pg31100.txt, pg3200.txt, pg100.txt +parson." pg3200.txt +parson; pg3200.txt +parson? pg100.txt +parsonage pg31100.txt +parsonage, pg31100.txt +parsonage-house; pg31100.txt +parsonage. pg31100.txt +parsonage: pg31100.txt +parsonage; pg31100.txt +parsons pg3200.txt +part pg31100.txt, pg3200.txt, pg100.txt +part! pg100.txt +part, pg31100.txt, pg3200.txt, pg100.txt +part- pg100.txt +part--bears pg3200.txt +part--but pg3200.txt +part--there pg31100.txt +part-sister pg3200.txt +part. pg31100.txt, pg3200.txt, pg100.txt +part." pg31100.txt, pg3200.txt +part.-- pg31100.txt +part: pg3200.txt +part; pg3200.txt, pg100.txt +part? pg100.txt +partake pg31100.txt, pg3200.txt, pg100.txt +partake. pg3200.txt, pg100.txt +partake? pg100.txt +partaken pg3200.txt +partaker. pg100.txt +partakers pg3200.txt +partaketh pg3200.txt +parted pg31100.txt, pg3200.txt, pg100.txt +parted, pg31100.txt, pg3200.txt, pg100.txt +parted. pg31100.txt, pg3200.txt, pg100.txt +parted." pg3200.txt +parted: pg3200.txt +parted? pg100.txt +parthenon, pg3200.txt +parthia pg100.txt +parthia. pg100.txt +parthian pg3200.txt +partial pg31100.txt, pg3200.txt +partial, pg3200.txt +partialities pg31100.txt, pg3200.txt +partialities, pg3200.txt +partiality pg31100.txt, pg3200.txt +partiality, pg31100.txt +partiality. pg31100.txt +partialize pg100.txt +partially pg31100.txt, pg3200.txt +partially, pg31100.txt +participants pg3200.txt +participate. pg100.txt +participated pg3200.txt +participation pg31100.txt +participles. pg3200.txt +particle pg3200.txt, pg100.txt +particles pg3200.txt +particular pg31100.txt, pg3200.txt, pg100.txt +particular, pg31100.txt, pg3200.txt, pg100.txt +particular- pg100.txt +particular. pg31100.txt, pg3200.txt, pg100.txt +particular." pg31100.txt, pg3200.txt +particular; pg31100.txt, pg3200.txt, pg100.txt +particular? pg3200.txt +particular?" pg3200.txt +particular?' pg3200.txt +particularities. pg100.txt +particularity pg3200.txt +particularity: pg31100.txt +particularize. pg3200.txt +particularized pg3200.txt +particularly pg31100.txt, pg3200.txt +particularly, pg31100.txt, pg3200.txt +particularly. pg3200.txt +particularly." pg31100.txt +particularly; pg31100.txt +particulars pg31100.txt, pg3200.txt, pg100.txt +particulars, pg31100.txt, pg3200.txt +particulars--i pg3200.txt +particulars. pg31100.txt, pg3200.txt, pg100.txt +particulars." pg31100.txt, pg3200.txt +particulars: pg3200.txt +particulars?" pg3200.txt +parties pg31100.txt, pg3200.txt, pg100.txt +parties" pg3200.txt +parties, pg31100.txt, pg3200.txt +parties- pg100.txt +parties. pg3200.txt, pg100.txt +parties." pg31100.txt +parties; pg3200.txt +parties;--with pg31100.txt +parties? pg3200.txt +parties?" pg31100.txt, pg3200.txt +parting pg31100.txt, pg3200.txt, pg100.txt +parting) pg31100.txt +parting, pg31100.txt, pg3200.txt +parting-- pg3200.txt +parting. pg3200.txt +parting: pg100.txt +parting; pg31100.txt, pg3200.txt +partisan? pg100.txt +partisans pg100.txt +partisans. pg100.txt +partita pg3200.txt +partition, pg100.txt +partition. pg100.txt +partition; pg3200.txt +partitions pg3200.txt +partly pg31100.txt, pg3200.txt, pg100.txt +partly, pg3200.txt, pg100.txt +partner pg31100.txt, pg3200.txt, pg100.txt +partner!" pg31100.txt +partner'd pg100.txt +partner, pg31100.txt, pg3200.txt, pg100.txt +partner. 
pg31100.txt, pg100.txt +partner." pg31100.txt +partner; pg31100.txt, pg100.txt +partner;--the pg31100.txt +partner?" pg31100.txt, pg3200.txt +partners pg31100.txt +partners" pg3200.txt +partners, pg31100.txt, pg100.txt +partners. pg3200.txt +partners." pg31100.txt +partnership pg3200.txt +partook pg3200.txt +partridge pg31100.txt +partridge, pg31100.txt, pg3200.txt +partridges pg31100.txt +parts pg31100.txt, pg3200.txt, pg100.txt +parts, pg31100.txt, pg3200.txt, pg100.txt +parts--"but pg31100.txt +parts. pg3200.txt, pg100.txt +parts." pg3200.txt +parts: pg100.txt +parts; pg3200.txt, pg100.txt +parts? pg100.txt +party pg31100.txt, pg3200.txt, pg100.txt +party!' pg3200.txt +party, pg31100.txt, pg3200.txt +party- pg100.txt +party--_i_ pg31100.txt +party--in pg31100.txt +party-emperor, pg3200.txt +party. pg31100.txt, pg3200.txt, pg100.txt +party." pg31100.txt, pg3200.txt +party: pg3200.txt +party; pg31100.txt, pg3200.txt +party? pg3200.txt, pg100.txt +party?" pg31100.txt +parvenu pg3200.txt +parvenus; pg3200.txt +pas pg3200.txt +pas' pg3200.txt +pashful? pg100.txt +pasquerel pg3200.txt +pasquerel, pg3200.txt +pass pg31100.txt, pg3200.txt, pg100.txt +pass! pg100.txt +pass'd pg100.txt +pass'd, pg100.txt +pass'd. pg100.txt +pass, pg3200.txt, pg100.txt +pass. pg31100.txt, pg3200.txt, pg100.txt +pass." pg3200.txt +pass.' pg3200.txt +pass: pg3200.txt +pass; pg3200.txt, pg100.txt +pass? pg100.txt +pass?" pg3200.txt +passable pg31100.txt, pg3200.txt +passable, pg3200.txt +passable. pg100.txt +passado! pg100.txt +passage pg31100.txt, pg3200.txt, pg100.txt +passage!" pg3200.txt +passage, pg31100.txt, pg3200.txt +passage. pg3200.txt, pg100.txt +passage." pg31100.txt +passage.' pg3200.txt +passage.]} pg3200.txt +passage: pg3200.txt +passage; pg31100.txt +passage? pg100.txt +passage?" pg31100.txt +passages pg3200.txt, pg100.txt +passages, pg3200.txt +passages. pg3200.txt +passageway pg3200.txt +passageway, pg3200.txt +passed pg31100.txt, pg3200.txt +passed, pg31100.txt, pg3200.txt +passed-- pg3200.txt +passed. pg31100.txt, pg3200.txt +passed." pg31100.txt +passed; pg3200.txt +passel pg3200.txt +passene, pg3200.txt +passenger pg3200.txt +passenger's pg3200.txt +passenger, pg3200.txt +passenger. pg3200.txt, pg100.txt +passengers pg3200.txt +passengers, pg3200.txt, pg100.txt +passengers. pg3200.txt, pg100.txt +passengers; pg100.txt +passenjare! pg3200.txt +passenjare!" pg3200.txt +passenjare." pg3200.txt +passer-by pg3200.txt +passers pg3200.txt +passers-by pg3200.txt +passes pg3200.txt, pg100.txt +passes! pg100.txt +passes, pg3200.txt +passes. pg31100.txt, pg3200.txt, pg100.txt +passes." pg31100.txt +passes; pg3200.txt +passeth pg3200.txt +passing pg31100.txt, pg3200.txt, pg100.txt +passing) pg3200.txt +passing, pg3200.txt +passing. pg31100.txt, pg3200.txt +passing." pg3200.txt +passion pg31100.txt, pg3200.txt, pg100.txt +passion! pg31100.txt +passion!- pg100.txt +passion, pg31100.txt, pg3200.txt, pg100.txt +passion- pg100.txt +passion--envy pg3200.txt +passion--the pg3200.txt +passion. pg3200.txt, pg100.txt +passion: pg3200.txt +passion; pg3200.txt, pg100.txt +passion? pg100.txt +passionate pg3200.txt +passionately pg3200.txt +passioning pg100.txt +passionless, pg3200.txt +passionlessly pg3200.txt +passions pg31100.txt, pg100.txt +passions, pg3200.txt +passions. pg31100.txt, pg3200.txt +passport pg3200.txt +passport, pg3200.txt +passport. pg3200.txt, pg100.txt +passports pg3200.txt +passports, pg3200.txt +passports. pg3200.txt +past pg31100.txt, pg3200.txt, pg100.txt +past! 
pg3200.txt +past, pg31100.txt, pg3200.txt, pg100.txt +past," pg3200.txt +past-- pg31100.txt +past. pg31100.txt, pg3200.txt, pg100.txt +past." pg3200.txt +past: pg100.txt +past; pg3200.txt, pg100.txt +paste pg3200.txt +paste; pg3200.txt, pg100.txt +pasted pg3200.txt +pastime pg100.txt +pastime, pg100.txt +pastime. pg3200.txt +pastime? pg100.txt +pastimes pg3200.txt +pastor pg3200.txt +pastor, pg3200.txt +pastor-universal pg3200.txt +pastor. pg3200.txt +pastry, pg31100.txt +pastry--oh pg3200.txt +pastry. pg3200.txt, pg100.txt +pasturage pg31100.txt, pg3200.txt +pasture-land pg3200.txt +pasture. pg31100.txt, pg100.txt +pasture." pg3200.txt +pasture? pg100.txt +pastures pg3200.txt +pat pg3200.txt +pat, pg3200.txt +pat." pg3200.txt +pat?" pg3200.txt +patay pg3200.txt +patay, pg3200.txt, pg100.txt +patay--check. pg3200.txt +patay. pg3200.txt +patch pg3200.txt +patch! pg100.txt +patch'd pg100.txt +patch'd. pg100.txt +patch, pg3200.txt +patch? pg100.txt +patched pg3200.txt +patched, pg3200.txt +patches pg3200.txt +patches!- pg100.txt +patches, pg3200.txt +patches; pg3200.txt +patching pg3200.txt +pate pg100.txt +pate, pg100.txt +pate. pg100.txt +patent pg3200.txt, pg100.txt +patent, pg3200.txt +patent-right--how pg3200.txt +patent." pg3200.txt +patented?" pg3200.txt +patents. pg3200.txt +patents; pg31100.txt +pates pg100.txt +pates, pg100.txt +path pg31100.txt, pg3200.txt +path! pg3200.txt +path, pg31100.txt, pg3200.txt, pg100.txt +path--the pg3200.txt +path. pg3200.txt, pg100.txt +path." pg31100.txt +path; pg3200.txt +path?" pg3200.txt +pathetic pg31100.txt, pg3200.txt +pathetic, pg3200.txt +pathetic. pg3200.txt +pathetic." pg3200.txt +pathetic: pg3200.txt +pathetic; pg3200.txt +pathetic?" pg31100.txt +pathetical! pg100.txt +pathetically pg3200.txt +pathetically-welcome pg3200.txt +pathfinder pg3200.txt +pathfinder, pg3200.txt +pathfinder. pg3200.txt +pathfinder.' pg3200.txt +pathless pg3200.txt +pathless, pg3200.txt +pathos pg3200.txt +pathos, pg3200.txt +pathos. pg3200.txt +pathos: pg3200.txt +paths pg3200.txt +paths, pg3200.txt +paths. pg100.txt +pathway pg3200.txt +pathway, pg3200.txt +patience pg31100.txt, pg3200.txt, pg100.txt +patience! pg100.txt +patience!' pg100.txt +patience!--and pg31100.txt +patience, pg31100.txt, pg3200.txt, pg100.txt +patience. pg3200.txt, pg100.txt +patience; pg3200.txt, pg100.txt +patience? pg100.txt +patient pg31100.txt, pg3200.txt, pg100.txt +patient's; pg3200.txt +patient, pg3200.txt +patient--'twill pg3200.txt +patient--a pg3200.txt +patient. pg3200.txt, pg100.txt +patient." pg3200.txt +patient.' pg3200.txt +patient; pg100.txt +patiently pg3200.txt +patiently, pg3200.txt, pg100.txt +patiently. pg3200.txt, pg100.txt +patiently.' pg3200.txt +patients pg3200.txt +patients, pg3200.txt +patients. pg3200.txt, pg100.txt +patients." pg3200.txt +patients? pg3200.txt +patois pg3200.txt +patriarchal pg3200.txt +patriarchs pg3200.txt +patriarchs--or pg3200.txt +patrician pg3200.txt +patrician. pg3200.txt +patricians pg100.txt +patricians, pg100.txt +patrick pg3200.txt +patrick's pg3200.txt +patrie pg3200.txt +patrimony, pg100.txt +patrimony. pg100.txt +patrimony; pg100.txt +patriot pg3200.txt +patriot--and pg3200.txt +patriotic pg3200.txt +patriotic, pg3200.txt +patriotism pg3200.txt +patriotism. pg31100.txt, pg3200.txt +patriots pg3200.txt +patriots. pg3200.txt +patroclus pg100.txt +patroclus, pg100.txt +patroclus. pg100.txt +patroclus; pg100.txt +patroclus? pg100.txt +patrol pg3200.txt +patron pg3200.txt +patron'- pg100.txt +patron." 
pg31100.txt +patronage pg31100.txt, pg3200.txt, pg100.txt +patronage, pg3200.txt +patronage.' pg3200.txt +patroness, pg31100.txt +patroness." pg31100.txt +patronized pg31100.txt +patronizingly, pg3200.txt +patrons pg3200.txt +patrons: pg3200.txt +pats pg3200.txt +patsy pg3200.txt +patsy--" pg3200.txt +patted pg3200.txt +pattering pg3200.txt +pattern pg31100.txt, pg3200.txt +pattern, pg31100.txt, pg3200.txt +pattern. pg31100.txt, pg3200.txt +patterned pg3200.txt +patterning pg3200.txt +patterns pg3200.txt +patterns; pg3200.txt +patterson." pg3200.txt +pattle pg100.txt +patty pg31100.txt +pau-puk-keewis--' pg3200.txt +paucas pg100.txt +paucity pg3200.txt +paul pg3200.txt +paul's. pg100.txt +paul, pg3200.txt, pg100.txt +paul--two pg3200.txt +paul. pg3200.txt +paul; pg3200.txt +paulina pg100.txt +paulina! pg100.txt +paulina, pg100.txt +paulina. pg100.txt +paulina; pg100.txt +paunch? pg100.txt +pauper pg3200.txt +pauper! pg3200.txt +pauper, pg3200.txt +pauper-shod pg3200.txt +pauper. pg3200.txt +pauper." pg3200.txt +paupers pg3200.txt +paupers! pg3200.txt +pause pg31100.txt, pg3200.txt, pg100.txt +pause, pg31100.txt, pg3200.txt, pg100.txt +pause,--"they pg31100.txt +pause-- pg3200.txt +pause--and pg3200.txt +pause--then pg3200.txt +pause. pg31100.txt, pg3200.txt, pg100.txt +pause.-- pg31100.txt +pause.] pg3200.txt +pause: pg3200.txt, pg100.txt +pause? pg100.txt +paused pg31100.txt, pg3200.txt +paused--"oh! pg31100.txt +paused. pg31100.txt +pauses, pg3200.txt +pausing pg31100.txt, pg3200.txt +pausing, pg31100.txt +paved pg3200.txt +paved, pg3200.txt +paved. pg3200.txt +pavement pg31100.txt, pg3200.txt +pavement, pg3200.txt +pavement. pg3200.txt +pavements pg3200.txt +pavements, pg3200.txt +pavilion pg100.txt +pavilion, pg3200.txt +pavilion. pg3200.txt, pg100.txt +pavilion?' pg3200.txt +paving-stone pg3200.txt +paving-stones. pg3200.txt +paw pg100.txt +paw, pg3200.txt, pg100.txt +pawed pg3200.txt +pawn pg31100.txt, pg3200.txt, pg100.txt +pawn'd pg100.txt +pawn'd: pg100.txt +pawn. pg100.txt +pawn; pg100.txt +pawnbroker pg3200.txt +paws pg3200.txt +paws, pg3200.txt, pg100.txt +paws. pg3200.txt +paws; pg100.txt +pay pg31100.txt, pg3200.txt, pg100.txt +pay't. pg3200.txt +pay, pg31100.txt, pg3200.txt, pg100.txt +pay-up pg3200.txt +pay. pg31100.txt, pg3200.txt, pg100.txt +pay." pg31100.txt, pg3200.txt +pay.' pg100.txt +pay; pg3200.txt, pg100.txt +pay? pg3200.txt, pg100.txt +pay?..... pg3200.txt +payable pg3200.txt +payers pg3200.txt +payest; pg100.txt +paying pg31100.txt, pg3200.txt, pg100.txt +payment pg31100.txt, pg3200.txt, pg100.txt +payment, pg31100.txt +payment- pg100.txt +payment. pg100.txt +payment." pg3200.txt +payments pg31100.txt, pg3200.txt, pg100.txt +payments?" pg3200.txt +paynim." pg3200.txt +payroll. pg3200.txt +pays pg3200.txt +pays. pg100.txt +pea-nuts pg3200.txt +peabody, pg3200.txt +peace pg31100.txt, pg3200.txt, pg100.txt +peace! pg3200.txt, pg100.txt +peace!" pg3200.txt +peace!--and pg3200.txt +peace, pg3200.txt, pg100.txt +peace," pg3200.txt +peace- pg100.txt +peace--a pg3200.txt +peace. pg31100.txt, pg3200.txt, pg100.txt +peace." pg3200.txt +peace.' pg100.txt +peace: pg3200.txt, pg100.txt +peace; pg31100.txt, pg3200.txt, pg100.txt +peace? pg3200.txt, pg100.txt +peaceably pg3200.txt +peaceably. pg100.txt +peaceably; pg3200.txt +peaceful pg3200.txt +peaceful, pg3200.txt +peacefully pg3200.txt +peacefully. pg3200.txt +peacemaker; pg3200.txt +peaces. pg100.txt +peach pg3200.txt +peach-vine pg3200.txt +peaches pg3200.txt +peaches. 
pg3200.txt +peacock, pg3200.txt +peacock-shams pg3200.txt +peak pg3200.txt, pg100.txt +peak. pg31100.txt, pg3200.txt +peaked pg3200.txt +peaks pg3200.txt +peaks, pg3200.txt +peaks. pg3200.txt +peal pg3200.txt +peal, pg100.txt +peal. pg100.txt +peal?" pg3200.txt +pealing pg3200.txt +peanuts pg3200.txt +pear! pg100.txt +pear. pg100.txt +pear; pg100.txt +peard pg100.txt +pearl pg100.txt +pearl, pg100.txt +pearl. pg100.txt +pearl; pg100.txt +pearls pg3200.txt, pg100.txt +pearls, pg31100.txt, pg3200.txt +pearly pg3200.txt +pears: pg100.txt +peas; pg3200.txt +peasant pg3200.txt +peasant! pg100.txt +peasant, pg3200.txt, pg100.txt +peasant-child pg3200.txt +peasant-girl pg3200.txt +peasant-maid pg3200.txt +peasant. pg100.txt +peasant." pg3200.txt +peasant]. pg100.txt +peasantry pg3200.txt +peasants pg3200.txt, pg100.txt +peasants, pg3200.txt, pg100.txt +peasants- pg100.txt +peasants. pg3200.txt +peasants: pg3200.txt +peascod. pg100.txt +pease, pg100.txt +pease; pg100.txt +peaseblossom, pg100.txt +peaseblossom. pg100.txt +peaseblossom? pg100.txt +pebble pg100.txt +pebble-splashes. pg3200.txt +pebble. pg100.txt +pebbles pg3200.txt +pebbles, pg3200.txt +pebblework. pg3200.txt +pebbly pg3200.txt +peck pg3200.txt +pecking pg3200.txt +pecks pg3200.txt +pectoris, pg3200.txt +peculiar pg31100.txt, pg3200.txt +peculiar," pg3200.txt +peculiar. pg3200.txt +peculiar; pg3200.txt +peculiarities pg31100.txt, pg3200.txt +peculiarities. pg3200.txt +peculiarity pg3200.txt +peculiarity, pg3200.txt +peculiarly pg31100.txt, pg3200.txt +peculiarly. pg3200.txt +pecuniary pg31100.txt, pg3200.txt +pecuniary, pg3200.txt +pedal pg3200.txt +pedals; pg3200.txt +pedant pg100.txt +pedant, pg100.txt +pedant; pg100.txt +peddle pg3200.txt +peddler's pg3200.txt +peddler- pg3200.txt +peddlers-- pg3200.txt +peddling pg3200.txt +pedestal pg3200.txt +pedestal!" pg3200.txt +pedestal, pg3200.txt +pedestal?" pg3200.txt +pedestal] pg100.txt +pedestals pg3200.txt +pedestrian pg3200.txt +pedestrianism. pg3200.txt +pedestrianism." pg3200.txt +pedigree pg3200.txt, pg100.txt +pedigree, pg100.txt +pedigree. pg100.txt +pedigree? pg100.txt +pedlar. pg3200.txt, pg100.txt +pedlar; pg100.txt +pedometer pg3200.txt +pedometer, pg3200.txt +pedometer; pg3200.txt +pedro. pg100.txt +peeked pg3200.txt +peep pg3200.txt, pg100.txt +peep-holes pg3200.txt +peep. pg100.txt +peep; pg100.txt +peeping pg3200.txt +peeps pg100.txt +peeps. pg100.txt +peer pg3200.txt, pg100.txt +peer! pg100.txt +peer, pg100.txt +peer. pg3200.txt, pg100.txt +peerage." pg3200.txt +peered pg3200.txt +peering pg3200.txt +peerless pg3200.txt +peerless, pg3200.txt, pg100.txt +peerless. pg100.txt +peers pg3200.txt, pg100.txt +peers! pg100.txt +peers, pg100.txt +peers. pg100.txt +peers; pg100.txt +peevish. pg31100.txt, pg100.txt +peg pg3200.txt +peg--the pg3200.txt +peg. pg3200.txt +peg?" pg3200.txt +pegasus pg100.txt +pegasus. pg100.txt +pegged pg3200.txt +pegging pg3200.txt +pegs pg3200.txt +pegs. pg3200.txt +peice pg31100.txt +peking?' pg3200.txt +pelf; pg100.txt +pelican, pg100.txt +pelisse." pg31100.txt +pell-mell pg100.txt +pell-mell, pg3200.txt +pell-mell; pg100.txt +pella pg100.txt +pellet pg3200.txt +pelted pg3200.txt +pelting pg3200.txt +pemberley pg31100.txt +pemberley, pg31100.txt +pemberley. pg31100.txt +pemberley." pg31100.txt +pemberley?" pg31100.txt +pembroke pg3200.txt, pg100.txt +pembroke! pg100.txt +pembroke, pg31100.txt +pembroke?" pg3200.txt +pen pg31100.txt, pg3200.txt, pg100.txt +pen!" pg3200.txt +pen) pg100.txt +pen, pg3200.txt, pg100.txt +pen-strokes pg3200.txt +pen. 
pg3200.txt, pg100.txt +pen." pg3200.txt +pen.' pg3200.txt +pen; pg31100.txt, pg3200.txt, pg100.txt +pen? pg3200.txt +pen?" pg3200.txt +penalties pg100.txt +penalty pg31100.txt, pg3200.txt +penalty, pg100.txt +penalty. pg100.txt +penalty? pg100.txt +penance pg31100.txt, pg100.txt +penance. pg100.txt +penance; pg31100.txt +penang-madras,...............1,280 pg3200.txt +pence pg3200.txt, pg100.txt +pence. pg100.txt +pence? pg100.txt +pencil pg3200.txt +pencil, pg3200.txt +pencil--she pg31100.txt +pencil. pg3200.txt +pencil]. pg3200.txt +penciled pg3200.txt +pencils pg3200.txt +pendant pg3200.txt +pendergrass, pg3200.txt +pending pg3200.txt +pending. pg3200.txt +pending? pg3200.txt +pendleton pg3200.txt +pendulum pg3200.txt +pendulum, pg3200.txt +pendulum--the pg3200.txt +pendulum-fashion, pg3200.txt +pendulum. pg3200.txt +penelope, pg31100.txt +penetrate pg3200.txt +penetrate. pg100.txt +penetrated pg3200.txt +penetrated, pg3200.txt +penetrating pg31100.txt, pg3200.txt +penetrating. pg3200.txt +penetration pg31100.txt +penetration, pg31100.txt +penitence, pg31100.txt +penitence. pg100.txt +penitent pg3200.txt +penitent, pg100.txt +penitent. pg31100.txt, pg100.txt +penitent; pg100.txt +penitent? pg100.txt +penitentiary pg3200.txt +penitents pg100.txt +penman. pg3200.txt +penmanship pg3200.txt +penmanship. pg3200.txt +penn'd, pg100.txt +penned pg3200.txt +pennies pg3200.txt +pennies, pg3200.txt +pennies--wonderful pg3200.txt +pennies. pg3200.txt +pennies." pg3200.txt +penniless pg3200.txt +penniless, pg3200.txt +penniless. pg3200.txt +penniless." pg3200.txt +penning pg100.txt +pennons?" pg3200.txt +pennsylvania pg3200.txt +pennsylvania's pg3200.txt +pennsylvania, pg3200.txt +pennsylvania. pg3200.txt +pennsylvania?" pg3200.txt +penny pg3200.txt +penny! pg3200.txt +penny) pg3200.txt +penny, pg3200.txt, pg100.txt +penny. pg3200.txt, pg100.txt +penny; pg3200.txt +pennybacker pg3200.txt +pennyworth pg31100.txt, pg100.txt +pennyworth. pg100.txt +penola pg3200.txt +penrod, pg3200.txt +pens pg3200.txt +pens, pg3200.txt, pg100.txt +pense. pg100.txt +pensez pg3200.txt +pension pg3200.txt, pg100.txt +pension-beaurivage." pg3200.txt +pension. pg3200.txt +pension." pg3200.txt +pensions, pg3200.txt +pensively pg3200.txt, pg100.txt +pent pg3200.txt +pentecost, pg100.txt +penthesilea. pg100.txt +penurious?' pg3200.txt +penury pg100.txt +penury. pg3200.txt +penury? pg100.txt +penwork. pg3200.txt +penzance pg3200.txt +penzance's pg3200.txt +penzance, pg3200.txt +people pg31100.txt, pg3200.txt, pg100.txt +people! pg31100.txt, pg100.txt +people!" pg3200.txt +people": pg3200.txt +people' pg3200.txt +people's pg31100.txt, pg3200.txt +people), pg3200.txt +people, pg31100.txt, pg3200.txt, pg100.txt +people- pg100.txt +people--' pg3200.txt +people--all pg3200.txt +people--and pg3200.txt +people--apparently pg3200.txt +people--as pg3200.txt +people--at pg3200.txt +people--but pg3200.txt +people--everything pg3200.txt +people--we pg3200.txt +people--whereas pg3200.txt +people. pg31100.txt, pg3200.txt, pg100.txt +people." pg31100.txt, pg3200.txt +people.' pg100.txt +people: pg3200.txt, pg100.txt +people; pg31100.txt, pg3200.txt, pg100.txt +people? pg31100.txt, pg3200.txt, pg100.txt +people?" pg31100.txt, pg3200.txt +people] pg100.txt +peopled. pg3200.txt +peoples pg3200.txt +peoples, pg3200.txt +peoples--the pg3200.txt +peoples. pg3200.txt +pepper pg3200.txt +pepper, pg3200.txt +pepper-box pg3200.txt +pepper. pg3200.txt +pepper: pg3200.txt +peppermint, pg3200.txt +pepys' pg3200.txt +per pg3200.txt +per'aps." 
pg3200.txt +per'aps?" pg3200.txt +per--" pg3200.txt +peradventure pg3200.txt, pg100.txt +peradventure: pg3200.txt +peradventures pg100.txt +perceive pg31100.txt, pg3200.txt, pg100.txt +perceive, pg31100.txt, pg100.txt +perceive." pg3200.txt +perceive? pg100.txt +perceived pg31100.txt, pg3200.txt +perceived, pg31100.txt, pg100.txt +perceived..... pg3200.txt +perceiving pg31100.txt, pg3200.txt +perceiving, pg31100.txt +percent pg3200.txt +percentage pg3200.txt +percentage. pg3200.txt +percentage." pg3200.txt +percentage; pg3200.txt +perceptible pg31100.txt, pg3200.txt +perceptible. pg31100.txt, pg3200.txt +perceptibly. pg3200.txt +perception pg31100.txt, pg3200.txt +perception. pg3200.txt +perch pg3200.txt +perch'd, pg100.txt +perch. pg3200.txt, pg100.txt +perchance pg3200.txt, pg100.txt +perchance, pg3200.txt, pg100.txt +perchance. pg100.txt +perched pg3200.txt +perched, pg3200.txt +percy pg3200.txt, pg100.txt +percy, pg100.txt +percy--but pg3200.txt +percy. pg100.txt +perdita pg100.txt +perdita! pg100.txt +perdita, pg100.txt +perdita] pg100.txt +perdition pg3200.txt, pg100.txt +perdition, pg3200.txt +perdition. pg3200.txt, pg100.txt +perdition." pg3200.txt +perdu! pg100.txt +perdu!- pg100.txt +perdy. pg100.txt +perdy; pg100.txt +pere pg3200.txt +pere. pg100.txt +peremptorily. pg3200.txt +peremptory pg3200.txt, pg100.txt +peremptory, pg100.txt +peremptory. pg100.txt +peremptory? pg100.txt +perennial pg3200.txt +perfect pg31100.txt, pg3200.txt, pg100.txt +perfect, pg3200.txt, pg100.txt +perfect--no pg3200.txt +perfect. pg31100.txt, pg3200.txt, pg100.txt +perfect." pg3200.txt +perfect: pg3200.txt +perfect; pg3200.txt +perfected pg31100.txt, pg3200.txt +perfected, pg3200.txt +perfected. pg100.txt +perfecting pg3200.txt +perfection pg31100.txt, pg3200.txt, pg100.txt +perfection! pg100.txt +perfection, pg3200.txt, pg100.txt +perfection--when pg3200.txt +perfection. pg31100.txt, pg3200.txt, pg100.txt +perfection; pg3200.txt +perfection? pg3200.txt +perfection?" pg31100.txt +perfections pg31100.txt, pg100.txt +perfections, pg100.txt +perfections. pg31100.txt, pg3200.txt +perfections; pg31100.txt +perfectly pg31100.txt, pg3200.txt, pg100.txt +perfectly, pg3200.txt +perfectly. pg3200.txt +perfectly." pg31100.txt, pg3200.txt +perfectly.--why pg31100.txt +perfidious, pg100.txt +perfidiously pg100.txt +perforce pg31100.txt, pg3200.txt, pg100.txt +perforce, pg100.txt +perforce. pg100.txt +perform pg31100.txt, pg3200.txt, pg100.txt +perform! pg3200.txt +perform'd pg100.txt +perform'd! pg100.txt +perform'd, pg100.txt +perform'd. pg100.txt +perform'd; pg100.txt +perform'd? pg100.txt +perform, pg3200.txt, pg100.txt +perform. pg100.txt +perform." pg3200.txt +perform; pg100.txt +performance pg31100.txt, pg3200.txt, pg100.txt +performance, pg31100.txt, pg3200.txt +performance. pg3200.txt, pg100.txt +performance." pg31100.txt +performance: pg3200.txt +performance; pg3200.txt +performance? pg3200.txt, pg100.txt +performance?' pg3200.txt +performances pg31100.txt, pg3200.txt +performances, pg31100.txt, pg100.txt +performances. pg3200.txt +performed pg31100.txt, pg3200.txt +performed. pg3200.txt +performed." pg3200.txt +performer pg3200.txt +performer's pg31100.txt +performer, pg100.txt +performer." pg31100.txt +performers pg31100.txt, pg3200.txt +performing pg3200.txt, pg100.txt +performs pg100.txt +perfum'd, pg100.txt +perfum'd; pg100.txt +perfume pg3200.txt +perfume. pg100.txt +perfumed pg100.txt +perfumes pg100.txt +perfumes, pg3200.txt +perfumes. pg3200.txt +perfunctory pg3200.txt +perfunctory. 
pg3200.txt +perfuses pg3200.txt +perhaps pg31100.txt, pg3200.txt, pg100.txt +perhaps). pg3200.txt +perhaps, pg31100.txt, pg3200.txt +perhaps," pg31100.txt +perhaps--" pg31100.txt +perhaps--i pg3200.txt +perhaps--though, pg31100.txt +perhaps--wanted pg3200.txt +perhaps. pg3200.txt, pg100.txt +perhaps." pg31100.txt, pg3200.txt +perhaps:] pg3200.txt +perhaps; pg3200.txt +perhaps? pg3200.txt, pg100.txt +perhaps?" pg3200.txt +perhapsers, pg3200.txt +periapts; pg100.txt +peril pg3200.txt, pg100.txt +peril, pg3200.txt, pg100.txt +peril- pg100.txt +peril-- pg3200.txt +peril. pg3200.txt, pg100.txt +peril." pg3200.txt +peril.' pg3200.txt, pg100.txt +perilous pg3200.txt +perilous, pg100.txt +perils pg3200.txt, pg100.txt +perils, pg3200.txt +perils. pg100.txt +perils." pg3200.txt +perimeter pg3200.txt +period pg31100.txt, pg3200.txt, pg100.txt +period! pg100.txt +period, pg31100.txt, pg3200.txt, pg100.txt +period--only pg3200.txt +period. pg31100.txt, pg3200.txt, pg100.txt +period." pg31100.txt +period; pg3200.txt +periodical pg3200.txt +periodicals pg3200.txt +periodicals. pg3200.txt +periodicals; pg3200.txt +periods pg31100.txt +periods. pg3200.txt +perish pg3200.txt, pg100.txt +perish! pg100.txt +perish!" pg3200.txt +perish!' pg3200.txt +perish'd. pg100.txt +perish'd; pg100.txt +perish, pg3200.txt +perish. pg3200.txt, pg100.txt +perish.'" pg3200.txt +perish: pg100.txt +perishable! pg3200.txt +perishable. pg3200.txt +perished pg3200.txt +perished!" pg3200.txt +perished, pg3200.txt +perished. pg3200.txt +perishing pg3200.txt +perishing, pg3200.txt +perishing. pg3200.txt, pg100.txt +perishing." pg3200.txt +periwig, pg100.txt +periwig. pg100.txt +perjur'd pg100.txt +perjur'd, pg100.txt +perjure pg100.txt +perjuries, pg100.txt +perjury pg100.txt +perjury, pg3200.txt, pg100.txt +perjury. pg3200.txt, pg100.txt +perjury." pg3200.txt +perjury: pg100.txt +perjury; pg100.txt +perjury? pg100.txt +perkins pg3200.txt +permafoy! pg100.txt +permanency pg3200.txt +permanent pg31100.txt, pg3200.txt +permanent, pg3200.txt +permanent. pg3200.txt +permanent." pg3200.txt +permanently pg31100.txt, pg3200.txt +permanently, pg3200.txt +permanently. pg31100.txt, pg3200.txt +permanently: pg3200.txt +permeated pg3200.txt +permissible, pg3200.txt +permissibly pg3200.txt +permission pg31100.txt, pg3200.txt, pg100.txt +permission, pg3200.txt, pg100.txt +permission. pg31100.txt, pg3200.txt, pg100.txt +permission; pg3200.txt +permit pg31100.txt, pg3200.txt, pg100.txt +permit, pg3200.txt, pg100.txt +permit-- pg3200.txt +permit--out pg3200.txt +permit. pg3200.txt +permit." pg3200.txt +permits, pg3200.txt +permitted pg31100.txt, pg3200.txt, pg100.txt +permitted, pg31100.txt +permitted--under pg3200.txt +permitted. pg3200.txt +permitting pg31100.txt, pg3200.txt +pernatty pg3200.txt +pernicious pg3200.txt +pernicious. pg100.txt +pernicious; pg31100.txt +perpend. pg100.txt +perpendicular pg3200.txt +perpendicular- pg100.txt +perpendicular. pg3200.txt +perpendicularly, pg3200.txt +perpetrator pg3200.txt +perpetual pg31100.txt, pg3200.txt, pg100.txt +perpetual, pg100.txt +perpetual; pg3200.txt +perpetually. pg31100.txt, pg100.txt +perpetuated pg3200.txt +perpetuated, pg3200.txt +perpetuates pg3200.txt +perpetuating pg3200.txt +perpetuity pg3200.txt, pg100.txt +perpetuity, pg100.txt +perpetuity? pg3200.txt +perplex pg3200.txt +perplex'd pg100.txt +perplex'd? pg100.txt +perplexed pg3200.txt +perplexed, pg3200.txt +perplexed. 
pg31100.txt, pg3200.txt +perplexed: pg3200.txt +perplexing pg31100.txt, pg3200.txt +perplexities pg3200.txt +perplexities, pg3200.txt +perplexities. pg3200.txt +perplexity pg31100.txt +perplexity! pg100.txt +perplexity, pg31100.txt +perplexity. pg3200.txt +perquisites. pg3200.txt +perry pg31100.txt +perry's pg3200.txt +perry. pg31100.txt +perry." pg31100.txt +perrys, pg31100.txt +pers-one. pg100.txt +persecuted pg3200.txt +persecuted. pg3200.txt +persecuting pg3200.txt +persecution pg3200.txt +persecution. pg3200.txt +persecutions, pg3200.txt +persecutions. pg3200.txt +persecutor pg3200.txt +persecutors pg3200.txt +persecutors. pg3200.txt +perseus: pg100.txt +persever pg100.txt +persever, pg100.txt +persever. pg100.txt +perseverance pg31100.txt, pg3200.txt +perseverance, pg3200.txt +perseverance. pg3200.txt +persevere. pg31100.txt +persevere.' pg3200.txt +persevered pg31100.txt +persevered, pg31100.txt +persevering pg31100.txt +persevering, pg31100.txt +persevering." pg31100.txt +persist pg3200.txt, pg100.txt +persisted pg3200.txt +persisted, pg3200.txt +persisted. pg3200.txt +persisted; pg3200.txt +persistence, pg3200.txt +persistency pg3200.txt +persistency; pg3200.txt +persistent pg3200.txt +persistent, pg3200.txt +persistent--a pg3200.txt +persistently pg3200.txt +persistently, pg3200.txt +persisting pg31100.txt +persists pg100.txt +persists: pg3200.txt +person pg31100.txt, pg3200.txt, pg100.txt +person! pg3200.txt, pg100.txt +person's pg31100.txt, pg3200.txt +person's, pg31100.txt +person) pg31100.txt +person, pg31100.txt, pg3200.txt, pg100.txt +person," pg3200.txt +person- pg100.txt +person--" pg3200.txt +person--for pg3200.txt +person--not pg3200.txt +person--to pg3200.txt +person. pg31100.txt, pg3200.txt, pg100.txt +person." pg31100.txt, pg3200.txt +person: pg3200.txt, pg100.txt +person; pg31100.txt, pg3200.txt, pg100.txt +person? pg100.txt +person?" pg3200.txt +personae pg100.txt +personae. pg31100.txt, pg100.txt +personae: pg3200.txt +personage pg31100.txt, pg3200.txt +personage, pg3200.txt, pg100.txt +personage. pg3200.txt +personage; pg3200.txt +personages pg31100.txt, pg3200.txt, pg100.txt +personages,) pg3200.txt +personages: pg3200.txt +personal pg31100.txt, pg3200.txt +personal, pg3200.txt +personal." pg3200.txt +personal; pg31100.txt, pg3200.txt +personal? pg3200.txt +personaleinkommensteuerschatzungskommissionsmitgliedsreisekostenrechnungs pg3200.txt +personality pg3200.txt +personality--so pg3200.txt +personality. pg3200.txt +personally pg31100.txt, pg3200.txt +personally. pg3200.txt +personally; pg3200.txt +personally?' pg3200.txt +personated. pg100.txt +personating pg3200.txt +personification, pg3200.txt +persons pg31100.txt, pg3200.txt, pg100.txt +persons! pg100.txt +persons, pg3200.txt +persons--throwing pg3200.txt +persons. pg3200.txt, pg100.txt +persons." pg31100.txt, pg3200.txt +persons; pg3200.txt +persons? pg3200.txt +perspective pg3200.txt +perspective, pg3200.txt +perspective; pg3200.txt +perspectives pg3200.txt +perspiration pg3200.txt +perspiration, pg3200.txt +perspiration. pg3200.txt +perspire, pg3200.txt +perspired pg3200.txt +perspiring pg3200.txt +persuadableness. pg31100.txt +persuade pg31100.txt, pg3200.txt, pg100.txt +persuade. pg100.txt +persuade." pg31100.txt +persuaded pg31100.txt, pg3200.txt, pg100.txt +persuaded! pg100.txt +persuaded, pg31100.txt +persuaded- pg100.txt +persuaded--it pg3200.txt +persuaded. pg31100.txt +persuaded." 
pg31100.txt, pg3200.txt +persuaded; pg100.txt +persuader pg3200.txt +persuades pg100.txt +persuading pg31100.txt, pg3200.txt, pg100.txt +persuading; pg100.txt +persuasion pg31100.txt, pg3200.txt, pg100.txt +persuasion! pg31100.txt +persuasion, pg31100.txt, pg100.txt +persuasion. pg31100.txt, pg3200.txt, pg100.txt +persuasion." pg31100.txt +persuasions pg3200.txt +persuasions, pg3200.txt +persuasions. pg3200.txt +persuasions; pg3200.txt +persuasive pg3200.txt +persuasive, pg3200.txt +persuasive. pg3200.txt +persuasive: pg3200.txt +persuasively: pg3200.txt +persuasiveness pg3200.txt +persuasiveness: pg3200.txt +pert pg31100.txt, pg3200.txt +pert, pg31100.txt +pertain, pg100.txt +pertaining pg3200.txt +perth pg31100.txt +pertilope, pg3200.txt +pertinacity pg3200.txt +pertinency, pg3200.txt +pertinent pg100.txt +pertinent; pg100.txt +pertly pg3200.txt +pertly. pg100.txt +pertolope pg3200.txt +perturbation pg31100.txt +perturbation, pg31100.txt +perturbation. pg31100.txt +perturbation; pg31100.txt +perturbations. pg100.txt +pertynante, pg3200.txt +peru: pg3200.txt +peruna. pg3200.txt +perus'd. pg100.txt +peruse pg31100.txt, pg3200.txt, pg100.txt +peruse, pg100.txt +perused pg100.txt +perusing pg31100.txt, pg3200.txt +pervade pg3200.txt +pervades pg3200.txt +pervading pg31100.txt, pg3200.txt +pervasive pg3200.txt +pervasively pg3200.txt +perverse, pg100.txt +perverse; pg31100.txt +perverseness pg31100.txt +perversion pg31100.txt +perversity pg3200.txt +pervert pg31100.txt +pester pg3200.txt +pestered pg3200.txt +pestering pg3200.txt +pestering, pg3200.txt +pestiferious pg3200.txt +pestilence pg3200.txt, pg100.txt +pestilence! pg100.txt +pestilence, pg100.txt +pestilence. pg3200.txt +pet pg3200.txt +pet), pg3200.txt +pet, pg3200.txt +pet--never pg3200.txt +petals pg3200.txt +pete pg3200.txt +pete." pg3200.txt +peter pg3200.txt, pg100.txt +peter! pg100.txt +peter's pg3200.txt +peter's, pg3200.txt +peter's. pg3200.txt +peter's; pg3200.txt +peter, pg3200.txt, pg100.txt +peter- pg100.txt +peter--peterson. pg3200.txt +peter. pg3200.txt, pg100.txt +peter." pg3200.txt +peter.] pg100.txt +peter: pg3200.txt +peter; pg3200.txt, pg100.txt +peter]. pg100.txt +peterborough, pg31100.txt +peters pg3200.txt +peters, pg3200.txt +petersburg pg3200.txt +petersburg. pg3200.txt +peterson. pg3200.txt +petit pg3200.txt +petition pg3200.txt, pg100.txt +petition, pg3200.txt, pg100.txt +petition--hadleyburg pg3200.txt +petition. pg3200.txt, pg100.txt +petition; pg3200.txt +petitioner pg3200.txt, pg100.txt +petitioner, pg100.txt +petitions; pg100.txt +peto pg100.txt +peto. pg100.txt +peto.] pg100.txt +peto; pg100.txt +petrarch, pg3200.txt +petrified pg3200.txt +petrified, pg3200.txt +petrified. pg3200.txt +petrifies. pg3200.txt +petrify pg3200.txt +petrify." pg3200.txt +petruchio pg100.txt +petruchio! pg100.txt +petruchio, pg100.txt +petruchio. pg100.txt +pets pg3200.txt +pets, pg3200.txt +petted pg3200.txt +petted, pg3200.txt +petter pg100.txt +petticoat pg3200.txt, pg100.txt +petticoat. pg3200.txt, pg100.txt +petticoat; pg100.txt +petticoat? pg100.txt +petticoats pg100.txt +petting pg3200.txt +petting, pg3200.txt +pettish,-- pg3200.txt +pettish. pg3200.txt +pettitoes pg100.txt +petty pg3200.txt +petulance pg31100.txt +peu, pg3200.txt +pew pg3200.txt +pew, pg31100.txt +pew." pg3200.txt +pew?" pg3200.txt +pews pg3200.txt +pewter pg3200.txt +pfennig, pg3200.txt +pff! 
pg3200.txt +phaethon, pg100.txt +phaeton pg31100.txt +phaeton, pg31100.txt +phaetons pg31100.txt +phallus pg3200.txt +phantasimes, pg100.txt +phantom pg3200.txt +phantom. pg3200.txt +phantom; pg3200.txt +phantoms pg3200.txt +phantoms--yes, pg3200.txt +pharamond pg100.txt +pharamond, pg100.txt +pharamond: pg100.txt +pharaoh's pg100.txt +pharmacy) pg3200.txt +pharsalia, pg100.txt +phase pg3200.txt +phase. pg3200.txt +phases pg3200.txt +phases, pg3200.txt +phazed pg3200.txt +pheasants' pg31100.txt +pheasants, pg31100.txt +pheasants." pg31100.txt +pheazar. pg100.txt +phebe pg100.txt +phebe, pg100.txt +phebe. pg100.txt +phelps pg3200.txt +phelps!" pg3200.txt +phelps'. pg3200.txt +phelps; pg3200.txt +phenix's pg3200.txt +phenomenal pg3200.txt +phenomenally pg3200.txt +phidias, pg3200.txt +phil pg3200.txt +phil!" pg3200.txt +phil. pg3200.txt +phil." pg3200.txt +phil?" pg3200.txt +philadelphia pg3200.txt +philadelphia, pg3200.txt +philadelphia--not pg3200.txt +philadelphia. pg3200.txt +philander, pg31100.txt +philanthropic pg31100.txt +philanthropists-- pg3200.txt +philanthropy pg3200.txt +philario pg100.txt +philario's, pg100.txt +philarmonus! pg100.txt +philip pg3200.txt +philip's pg3200.txt +philip, pg3200.txt +philip. pg3200.txt, pg100.txt +philip." pg3200.txt +philip? pg100.txt +philip?" pg3200.txt +philippa pg31100.txt +philippa. pg31100.txt +philippan. pg100.txt +philippe, pg100.txt +philippi pg3200.txt +philippi. pg100.txt +philippines pg3200.txt +philippines, pg3200.txt +phillipene!" pg3200.txt +phillips pg31100.txt, pg3200.txt +phillips's. pg31100.txt +phillips, pg31100.txt +phillipses pg31100.txt +phillipses, pg31100.txt +philo pg100.txt +philologist, pg3200.txt +philologist. pg3200.txt +philomel pg100.txt +philomel. pg100.txt +philosopher pg31100.txt, pg3200.txt, pg100.txt +philosopher! pg100.txt +philosopher, pg3200.txt +philosopher. pg3200.txt, pg100.txt +philosophers pg3200.txt, pg100.txt +philosophers. pg3200.txt +philosophers." pg31100.txt +philosophic pg31100.txt +philosophical pg3200.txt, pg100.txt +philosophizing pg3200.txt +philosophizing, pg3200.txt +philosophizings, pg3200.txt +philosophy pg31100.txt, pg3200.txt, pg100.txt +philosophy! pg100.txt +philosophy, pg3200.txt, pg100.txt +philosophy--which pg3200.txt +philosophy. pg3200.txt, pg100.txt +philosophy." pg3200.txt +philosophy.] pg3200.txt +philostrate pg100.txt +philostrate, pg100.txt +philostrate. pg100.txt +philotus pg100.txt +phlegmatic, pg3200.txt +phlox-bore pg3200.txt +phoebe. pg3200.txt +phoebus. pg100.txt +phoenicia. pg100.txt +phoenicians. pg3200.txt +phoenix pg100.txt +phoenix. pg3200.txt +phoenix; pg100.txt +phonograph pg3200.txt +phonograph, pg3200.txt +phonograph. pg3200.txt +phonographer--and pg3200.txt +phonographic pg3200.txt +phonographs pg3200.txt +phonographs, pg3200.txt +phonography. pg3200.txt +phosphorescent pg3200.txt +photo pg3200.txt +photograph pg3200.txt +photograph. pg3200.txt +photographed pg3200.txt +photographer pg3200.txt +photographers pg3200.txt +photographic pg3200.txt +photographs pg3200.txt +photographs, pg3200.txt +photographs. pg3200.txt +photographs; pg3200.txt +photography, pg3200.txt +photography; pg3200.txt +photography? pg3200.txt +phrase pg31100.txt, pg3200.txt, pg100.txt +phrase! pg100.txt +phrase, pg31100.txt, pg3200.txt, pg100.txt +phrase--and pg31100.txt, pg3200.txt +phrase--they pg3200.txt +phrase-books pg3200.txt +phrase. pg3200.txt, pg100.txt +phrase: pg3200.txt +phrase; pg3200.txt, pg100.txt +phrase? 
pg3200.txt +phrased pg3200.txt +phraseology pg3200.txt +phrases pg3200.txt +phrases, pg3200.txt, pg100.txt +phrases-- pg3200.txt +phrases--phrases pg3200.txt +phrases. pg3200.txt +phrasing pg3200.txt +phrasing, pg3200.txt +phrasing. pg3200.txt +phrygia. pg100.txt +phrynia pg100.txt +phthisis. pg3200.txt +phunny pg3200.txt +phy--" pg3200.txt +phyllis pg3200.txt +physic pg31100.txt, pg100.txt +physic, pg100.txt +physic. pg100.txt +physic." pg31100.txt +physic? pg100.txt +physical pg3200.txt, pg100.txt +physical--passions pg3200.txt +physical-passions pg3200.txt +physical? pg3200.txt +physically pg3200.txt +physically, pg3200.txt +physician pg3200.txt, pg100.txt +physician, pg3200.txt, pg100.txt +physician. pg3200.txt, pg100.txt +physician; pg100.txt +physicians pg31100.txt, pg3200.txt, pg100.txt +physicians, pg100.txt +physicians. pg3200.txt, pg100.txt +physicians." pg3200.txt +physicians: pg3200.txt +physicians; pg3200.txt +physics, pg3200.txt +piano pg3200.txt +piano's pg3200.txt +piano, pg3200.txt +piano-forte pg31100.txt +piano. pg3200.txt +piano] pg3200.txt +pianoforte pg31100.txt +pianoforte, pg31100.txt +pianoforte. pg31100.txt +pianoforte." pg31100.txt +pianoforte; pg31100.txt +piazza pg3200.txt +picardy pg100.txt +pick pg31100.txt, pg3200.txt, pg100.txt +pick'd pg100.txt +pick, pg3200.txt +pick-purse. pg100.txt +pickaxes]. pg100.txt +picked pg3200.txt +picket pg3200.txt +pickets. pg3200.txt +picking pg3200.txt +pickle. pg100.txt +pickle? pg100.txt +pickpocket pg3200.txt +pickpocket. pg3200.txt +pickpockets pg3200.txt +pickpurse. pg100.txt +picks pg3200.txt +picnic pg3200.txt +picnic, pg3200.txt +picnic." pg3200.txt +picnickers pg3200.txt +picture pg31100.txt, pg3200.txt, pg100.txt +picture! pg3200.txt +picture!" pg31100.txt, pg3200.txt +picture!--and pg31100.txt +picture, pg3200.txt, pg100.txt +picture-- pg3200.txt +picture--" pg3200.txt +picture--a pg3200.txt +picture--biblical pg3200.txt +picture--the pg3200.txt +picture-book. pg3200.txt +picture. pg31100.txt, pg3200.txt, pg100.txt +picture." pg3200.txt +picture: pg3200.txt +picture; pg3200.txt +picture? pg3200.txt, pg100.txt +picture?" pg31100.txt +pictured pg3200.txt +pictures pg31100.txt, pg3200.txt +pictures, pg3200.txt +pictures--it pg3200.txt +pictures--pictures pg3200.txt +pictures. pg3200.txt +pictures; pg31100.txt, pg3200.txt, pg100.txt +pictures] pg3200.txt +picturesque pg31100.txt, pg3200.txt +picturesque, pg3200.txt +picturesque. pg3200.txt +picturesque." pg31100.txt +picturesque: pg3200.txt +picturesquely pg3200.txt +picturesquely-clad pg3200.txt +picturesqueness pg3200.txt +picturesqueness. pg3200.txt +picturesquenesses. pg3200.txt +picturings pg3200.txt +pie pg3200.txt +pie!" pg3200.txt +pie, pg3200.txt, pg100.txt +pie--but pg3200.txt +pie-plant. pg3200.txt +pie. pg3200.txt +pie." pg3200.txt +pie; pg3200.txt, pg100.txt +piec'd pg100.txt +piec'd, pg100.txt +piece pg31100.txt, pg3200.txt, pg100.txt +piece, pg3200.txt, pg100.txt +piece. pg3200.txt, pg100.txt +piece.) pg3200.txt +piece; pg100.txt +pieced pg3200.txt +pieces pg3200.txt, pg100.txt +pieces! pg100.txt +pieces, pg3200.txt +pieces. pg31100.txt, pg3200.txt, pg100.txt +pieces." pg31100.txt, pg3200.txt +pieces; pg3200.txt +pieces] pg100.txt +piecing pg3200.txt +pied pg100.txt +pied. pg3200.txt +pier pg3200.txt, pg100.txt +pier, pg3200.txt +pier--with pg3200.txt +pierce pg3200.txt, pg100.txt +pierce, pg3200.txt, pg100.txt +pierce? pg100.txt +pierced pg3200.txt +pierces pg3200.txt +pierre pg3200.txt +pierre, pg3200.txt +pierre." pg3200.txt +pierre?" 
pg3200.txt +pierres, pg3200.txt +pies pg3200.txt +pies, pg31100.txt, pg3200.txt +pies." pg3200.txt +piety pg100.txt +piety! pg100.txt +piety, pg3200.txt +piety-hive pg3200.txt +piety." pg3200.txt +piety; pg3200.txt, pg100.txt +pig-nuts; pg100.txt +pig. pg3200.txt +pig; pg3200.txt, pg100.txt +pig?" pg3200.txt +pigeon pg31100.txt +pigeon, pg3200.txt +pigeon-egg pg100.txt +pigeon-hole pg3200.txt +pigeons pg100.txt +pigeons. pg3200.txt +pigeons? pg100.txt +piggins, pg3200.txt +pigmies pg3200.txt +pigrogromitus, pg100.txt +pigs pg31100.txt, pg3200.txt +pigs; pg3200.txt +pike? pg100.txt +pikes pg100.txt +pikes, pg100.txt +pil'd pg100.txt +pilasters pg3200.txt +pilasters, pg3200.txt +pilates pg100.txt +pile pg3200.txt, pg100.txt +pile, pg3200.txt +pile--" pg3200.txt +pile--twenty pg3200.txt +pile-driver, pg3200.txt +pile. pg3200.txt, pg100.txt +pile." pg3200.txt +piled pg3200.txt +piles pg3200.txt +piles, pg3200.txt +piles. pg3200.txt +pilfering. pg3200.txt +pilgrim pg3200.txt, pg100.txt +pilgrim, pg100.txt +pilgrim. pg100.txt +pilgrim; pg3200.txt +pilgrimage pg3200.txt +pilgrimage! pg100.txt +pilgrimage, pg3200.txt, pg100.txt +pilgrimage. pg3200.txt, pg100.txt +pilgrimage: pg100.txt +pilgrimage; pg100.txt +pilgrimages pg3200.txt +pilgriming pg3200.txt +pilgrims pg3200.txt +pilgrims, pg3200.txt +pilgrims--home pg3200.txt +pilgrims. pg3200.txt +pilgrims; pg3200.txt +piling pg3200.txt +pill, pg3200.txt +pill; pg3200.txt +pillage, pg100.txt +pillaged pg3200.txt +pillagers pg100.txt +pillar pg3200.txt +pillar, pg100.txt +pillar. pg3200.txt +pillared pg3200.txt +pillars pg3200.txt +pillars, pg3200.txt +pillars. pg3200.txt +pillars; pg100.txt +pilloried. pg3200.txt +pillory pg3200.txt +pillory--" pg3200.txt +pillory; pg3200.txt +pillow pg3200.txt, pg100.txt +pillow, pg3200.txt, pg100.txt +pillow. pg3200.txt, pg100.txt +pillow; pg100.txt +pillow? pg100.txt +pillows pg3200.txt +pillows, pg3200.txt +pillows. pg100.txt +pills pg3200.txt +pills, pg100.txt +pills. pg3200.txt +pilot pg3200.txt, pg100.txt +pilot!' pg3200.txt +pilot'.]} pg3200.txt +pilot's pg3200.txt +pilot, pg3200.txt, pg100.txt +pilot- pg3200.txt +pilot-house pg3200.txt +pilot-house! pg3200.txt +pilot-house--brown pg3200.txt +pilot-house--to pg3200.txt +pilot-house--very pg3200.txt +pilot-house. pg3200.txt +pilot-house; pg3200.txt +pilot-house?' pg3200.txt +pilot. pg3200.txt +pilot; pg3200.txt +pilot?' pg3200.txt +piloted pg3200.txt +pilothouse. pg3200.txt +piloting pg3200.txt +piloting, pg3200.txt +piloting. pg3200.txt +pilots pg3200.txt +pilots, pg3200.txt +pilots-- pg3200.txt +pilots. pg3200.txt +pimp!' pg3200.txt +pimpernell; pg100.txt +pimple. pg3200.txt +pimples. pg31100.txt +pimply pg3200.txt +pimply; pg3200.txt +pin pg3200.txt, pg100.txt +pin! pg3200.txt +pin'd. pg100.txt +pin's pg3200.txt +pin, pg3200.txt, pg100.txt +pin-heads. pg3200.txt +pin. pg3200.txt, pg100.txt +pinacotek, pg3200.txt +pinafores, pg3200.txt +pinch pg100.txt +pinch'd pg100.txt +pinch, pg100.txt +pinch-spotted pg100.txt +pinch." pg3200.txt +pinchbeck pg3200.txt +pinched pg3200.txt +pinches pg100.txt +pinches: pg100.txt +pinches; pg100.txt +pinching pg31100.txt +pindarus pg100.txt +pindarus, pg100.txt +pindarus. pg100.txt +pindarus? pg100.txt +pine pg3200.txt, pg100.txt +pine, pg100.txt +pine. pg100.txt +pine; pg100.txt +pined pg3200.txt, pg100.txt +pineries pg3200.txt +pinery pg31100.txt +pines pg100.txt +pines" pg3200.txt +pines, pg3200.txt +pines. pg3200.txt +pines; pg3200.txt +pinfold. pg100.txt +pining pg31100.txt +pinion'd. pg100.txt +pink pg3200.txt +pink. 
pg3200.txt +pink." pg3200.txt +pinkerton pg3200.txt +pinnacle pg3200.txt +pinnacles pg3200.txt +pinnacles, pg3200.txt +pinned pg31100.txt, pg3200.txt +pinning pg3200.txt +pins pg31100.txt, pg3200.txt, pg100.txt +pins, pg100.txt +pint pg3200.txt +pint, pg100.txt +pint." pg3200.txt +pints pg31100.txt, pg3200.txt +pion, pg3200.txt +pioneer pg3200.txt +pioneer, pg3200.txt +pioneer. pg31100.txt, pg3200.txt +pioneers pg3200.txt +pious pg3200.txt +pious-- pg3200.txt +pious--also pg3200.txt +pious. pg3200.txt +pious? pg100.txt +pipe pg3200.txt, pg100.txt +pipe, pg31100.txt, pg3200.txt +pipe-smoking, pg3200.txt +pipe-smoking. pg3200.txt +pipe-stem. pg3200.txt +pipe-wine pg100.txt +pipe. pg3200.txt +pipe: pg3200.txt +pipe; pg100.txt +pipe? pg100.txt +pipe] pg100.txt +piper pg3200.txt +piper, pg3200.txt +pipers! pg100.txt +pipes pg3200.txt, pg100.txt +pipes, pg3200.txt, pg100.txt +pipes--and pg3200.txt +piping pg3200.txt +pippins pg100.txt +piquancy pg3200.txt +pique pg3200.txt +piquet, pg31100.txt +piracies pg3200.txt +piracy pg3200.txt +piracy: pg3200.txt +piraeus pg3200.txt +pirate pg3200.txt, pg100.txt +pirate, pg3200.txt, pg100.txt +pirate. pg3200.txt, pg100.txt +pirate." pg3200.txt +pirate; pg3200.txt, pg100.txt +pirate?" pg3200.txt +pirated, pg3200.txt +pirates pg3200.txt +pirates, pg3200.txt, pg100.txt +pirates. pg3200.txt, pg100.txt +pirates." pg3200.txt +piratical pg3200.txt +pirating, pg3200.txt +pirouetted pg3200.txt +pisa pg3200.txt, pg100.txt +pisa, pg100.txt +pisa. pg3200.txt, pg100.txt +pisa? pg100.txt +pisan pg3200.txt +pisanio pg100.txt +pisanio! pg100.txt +pisanio!- pg100.txt +pisanio, pg100.txt +pisanio- pg100.txt +pisanio; pg100.txt +pisanio? pg100.txt +pish! pg100.txt +pisiness. pg3200.txt +pison pg3200.txt +pistol pg3200.txt, pg100.txt +pistol! pg100.txt +pistol, pg3200.txt, pg100.txt +pistol- pg100.txt +pistol-bullet, pg3200.txt +pistol. pg3200.txt, pg100.txt +pistol; pg3200.txt +pistol? pg100.txt +pistol?" pg3200.txt +pistol?' pg3200.txt +pistoletta. pg31100.txt +pistols pg3200.txt +pistols, pg3200.txt +pistols. pg3200.txt +pistols." pg3200.txt +piston-rod. pg3200.txt +pit pg3200.txt, pg100.txt +pit! pg100.txt +pit, pg100.txt +pit. pg3200.txt, pg100.txt +pit; pg100.txt +pit?" pg3200.txt +pit] pg100.txt +pitcairn pg3200.txt +pitch pg31100.txt, pg100.txt +pitch'd pg100.txt +pitch, pg3200.txt, pg100.txt +pitch- pg3200.txt +pitch-dark pg3200.txt +pitch-smoke. pg3200.txt +pitch. pg100.txt +pitch; pg100.txt +pitched pg3200.txt +pitcher pg3200.txt +pitcher-bearers; pg3200.txt +pitcher; pg3200.txt +pitchers. pg3200.txt +pite, pg3200.txt +piteous pg3200.txt +piteous, pg3200.txt +piteously pg3200.txt +piteously, pg3200.txt +piteously: pg3200.txt +pith, pg100.txt +pith. pg3200.txt +pitiable pg31100.txt, pg3200.txt +pitiable; pg31100.txt +pitied pg31100.txt, pg3200.txt, pg100.txt +pitied, pg100.txt +pitied. pg100.txt +pitied." pg31100.txt +pities- pg100.txt +pitiful pg31100.txt, pg3200.txt, pg100.txt +pitiful! pg100.txt +pitiful, pg3200.txt +pitiful. pg3200.txt, pg100.txt +pitiful." pg31100.txt +pitiful; pg100.txt +pitifullest pg31100.txt +pitifully pg100.txt +pitifully. pg100.txt +pitiless pg3200.txt +pitiless, pg3200.txt +pitiless. pg100.txt +pitiless? pg100.txt +pitilessly pg3200.txt +pittance. pg100.txt +pitted pg3200.txt +pitti, pg3200.txt +pittsburg?' pg3200.txt +pity pg31100.txt, pg3200.txt, pg100.txt +pity! pg100.txt +pity!" pg3200.txt +pity, pg31100.txt, pg3200.txt, pg100.txt +pity- pg100.txt +pity--it pg3200.txt +pity. pg31100.txt, pg3200.txt, pg100.txt +pity." 
pg3200.txt +pity.'" pg3200.txt +pity: pg3200.txt, pg100.txt +pity; pg3200.txt, pg100.txt +pity? pg3200.txt, pg100.txt +pitying pg3200.txt, pg100.txt +pitying. pg100.txt +pius pg100.txt +piute. pg3200.txt +plac'd pg100.txt +plac'd, pg100.txt +plac'd; pg100.txt +placard pg3200.txt +placarded pg3200.txt +place pg31100.txt, pg3200.txt, pg100.txt +place! pg3200.txt, pg100.txt +place!" pg31100.txt, pg3200.txt +place, pg31100.txt, pg3200.txt, pg100.txt +place," pg31100.txt +place,--bellaggio. pg3200.txt +place- pg100.txt +place--all pg3200.txt +place--and pg3200.txt +place--but pg3200.txt +place--it pg3200.txt +place--still pg3200.txt +place--that pg3200.txt +place--tries pg3200.txt +place-the pg3200.txt +place. pg31100.txt, pg3200.txt, pg100.txt +place." pg31100.txt, pg3200.txt +place.'" pg3200.txt +place.--and pg3200.txt +place: pg31100.txt, pg3200.txt, pg100.txt +place; pg31100.txt, pg3200.txt, pg100.txt +place? pg31100.txt, pg3200.txt, pg100.txt +place?" pg31100.txt, pg3200.txt +place] pg3200.txt +placed pg31100.txt, pg3200.txt, pg100.txt +placed, pg31100.txt +placed. pg31100.txt, pg3200.txt +placed." pg3200.txt +places pg31100.txt, pg3200.txt, pg100.txt +places!--and pg3200.txt +places, pg31100.txt, pg3200.txt, pg100.txt +places--everything. pg3200.txt +places--the pg3200.txt +places. pg31100.txt, pg3200.txt, pg100.txt +places." pg3200.txt +places.' pg3200.txt +places; pg3200.txt, pg100.txt +places? pg3200.txt +placeth pg100.txt +placid pg3200.txt +placid, pg100.txt +placidities, pg3200.txt +placidity, pg31100.txt +placidity. pg3200.txt +placidly pg3200.txt +placidly-- pg3200.txt +placidly. pg3200.txt +placidly: pg3200.txt +placing pg31100.txt, pg3200.txt +plagiarism pg3200.txt +plagiarism: pg3200.txt +plagiarist. pg3200.txt +plagu'd. pg100.txt +plague pg31100.txt, pg3200.txt, pg100.txt +plague! pg100.txt +plague, pg100.txt +plague-patient pg3200.txt +plague. pg3200.txt +plague? pg100.txt +plague?" pg3200.txt +plagued pg31100.txt, pg3200.txt, pg100.txt +plagues pg100.txt +plagues, pg3200.txt, pg100.txt +plagues. pg3200.txt, pg100.txt +plain pg31100.txt, pg3200.txt, pg100.txt +plain! pg3200.txt +plain!" pg31100.txt +plain, pg31100.txt, pg3200.txt, pg100.txt +plain,' pg3200.txt +plain--footprints pg3200.txt +plain--he pg3200.txt +plain--love-songs pg3200.txt +plain--or pg3200.txt +plain-song pg100.txt +plain-spoken. pg3200.txt +plain. pg3200.txt, pg100.txt +plain." pg31100.txt, pg3200.txt +plain.' pg3200.txt +plain: pg31100.txt, pg3200.txt +plain; pg3200.txt, pg100.txt +plain?" pg31100.txt +plainer pg3200.txt +plainest pg3200.txt +plainly pg31100.txt, pg3200.txt, pg100.txt +plainly, pg3200.txt, pg100.txt +plainly. pg31100.txt, pg100.txt +plainness pg100.txt +plainness; pg100.txt +plains pg3200.txt, pg100.txt +plains! pg3200.txt +plains, pg3200.txt, pg100.txt +plains. pg3200.txt +plains; pg3200.txt +plains?" pg3200.txt +plainsong pg100.txt +plaintiff. pg3200.txt +plaintiff?" pg3200.txt +plaintive pg31100.txt +plaintively pg3200.txt +plaintively-- pg3200.txt +plaintively: pg3200.txt +plan pg31100.txt, pg3200.txt +plan, pg31100.txt, pg3200.txt +plan--much pg31100.txt +plan. pg31100.txt, pg3200.txt +plan." pg31100.txt, pg3200.txt +plan: pg3200.txt +plan; pg31100.txt, pg3200.txt +plan? pg3200.txt +plan?" pg31100.txt +plane pg3200.txt +plane." pg3200.txt +planet pg3200.txt +planet, pg3200.txt +planet--put pg3200.txt +planet. pg3200.txt +planet?" pg3200.txt +planets pg3200.txt, pg100.txt +plank pg3200.txt +plank, pg3200.txt +plank." pg3200.txt +planks pg3200.txt +planks, pg100.txt +planks. 
pg3200.txt
+[inverted-index output entries "planned" through "publius?": each entry is a token followed by the Project Gutenberg source files (pg100.txt, pg3200.txt, pg31100.txt) that contain it; representative entries:]
+plant pg3200.txt, pg100.txt
+plausible pg31100.txt, pg3200.txt
+pocket pg31100.txt, pg3200.txt, pg100.txt
+prisoner pg3200.txt, pg100.txt
pg100.txt +pucelle pg100.txt +pucelle, pg100.txt +puck pg100.txt +puck, pg100.txt +pucker pg3200.txt +puckittypukk pg3200.txt +pudd'n-headed pg3200.txt +pudd'nhead pg3200.txt +pudd'nhead, pg3200.txt +pudd'nhead--it's pg3200.txt +pudd'nhead?" pg3200.txt +pudd'nheads. pg3200.txt +pudding pg3200.txt, pg100.txt +pudding, pg3200.txt +pudding-headed pg3200.txt +pudding. pg3200.txt +puddings. pg100.txt +puddle pg3200.txt, pg100.txt +puddle, pg3200.txt +puddles, pg3200.txt +puddles. pg3200.txt +puerility pg3200.txt +puerility. pg3200.txt +puff pg31100.txt, pg3200.txt, pg100.txt +puff! pg100.txt +puff'd, pg100.txt +puff, pg3200.txt +puffed pg3200.txt +puffer, pg3200.txt +puffing pg3200.txt +puffs pg3200.txt +pug pg31100.txt, pg3200.txt +puggawaugun, pg3200.txt +puissance pg3200.txt, pg100.txt +puissance; pg100.txt +puissant pg3200.txt +puissant. pg3200.txt +pulcher. pg100.txt +pull pg3200.txt, pg100.txt +pull'd pg100.txt +pull, pg3200.txt +pull. pg3200.txt +pulled pg31100.txt, pg3200.txt +pulled, pg3200.txt +pullet-sperm pg100.txt +pulley pg3200.txt +pulleys, pg3200.txt +pulling pg31100.txt, pg3200.txt, pg100.txt +pulls pg3200.txt, pg100.txt +pulp pg3200.txt +pulpit pg3200.txt +pulpit! pg3200.txt +pulpit, pg3200.txt +pulpit. pg3200.txt, pg100.txt +pulpit." pg31100.txt, pg3200.txt +pulpit; pg3200.txt +pulpit? pg3200.txt +pulpits pg3200.txt +pulsation. pg3200.txt +pulse pg100.txt +pulse, pg3200.txt, pg100.txt +pulse. pg3200.txt, pg100.txt +pulsed pg3200.txt +pulsing pg3200.txt +pulteney pg31100.txt +pumice-stone pg3200.txt +pumice-stone, pg3200.txt +pummel pg3200.txt +pump pg3200.txt +pump! pg3200.txt +pump, pg3200.txt +pump-room pg31100.txt +pump-room, pg31100.txt +pump-room. pg31100.txt +pump-room; pg31100.txt +pump. pg3200.txt +pumped pg3200.txt +pumpkins pg3200.txt +pumps pg3200.txt +pun pg3200.txt +pun, pg31100.txt +pun. pg3200.txt +punaise pg3200.txt +punch pg3200.txt +punch! pg3200.txt +punch, pg3200.txt +punch--punch--oh, pg3200.txt +punch. pg3200.txt +punch." pg3200.txt +punch?" pg3200.txt +punches, pg3200.txt +punching pg3200.txt +punctual pg3200.txt +punctual; pg3200.txt +punctual? pg31100.txt +punctuality pg31100.txt +punctuality, pg31100.txt +punctually, pg31100.txt +punctuate pg3200.txt +punctuated pg3200.txt +punctuation pg31100.txt, pg100.txt +punctuation--from pg3200.txt +punctuation. pg3200.txt +pungent--" pg3200.txt +punish pg3200.txt, pg100.txt +punish'd pg100.txt +punish'd. pg100.txt +punish. pg100.txt +punish; pg100.txt +punished pg3200.txt +punished, pg3200.txt +punished. pg31100.txt, pg100.txt +punished; pg3200.txt, pg100.txt +punishing pg3200.txt +punishment pg31100.txt, pg3200.txt, pg100.txt +punishment, pg3200.txt +punishment. pg31100.txt, pg3200.txt, pg100.txt +punishment." pg31100.txt, pg3200.txt +punishment; pg31100.txt, pg3200.txt +punishment? pg100.txt +punishment?" pg3200.txt +punitive pg31100.txt, pg3200.txt, pg100.txt +puns; pg3200.txt +puny, pg31100.txt +pup pg3200.txt +pupil pg3200.txt, pg100.txt +pupil-like, pg100.txt +pupil. pg3200.txt +pupil; pg31100.txt +pupils pg3200.txt +pupils, pg3200.txt +pupils. pg3200.txt +puppet pg100.txt +puppet! pg100.txt +puppet, pg3200.txt +puppies pg31100.txt, pg3200.txt +puppies! pg100.txt +puppies._] pg31100.txt +puppy pg3200.txt, pg100.txt +puppy! pg3200.txt +puppy!" pg3200.txt +puppy!--you pg31100.txt +puppy- pg100.txt +puppy-dog. pg100.txt +puppy-dogs! pg100.txt +puppy. pg100.txt +puppy." pg31100.txt +puppyism. 
pg31100.txt +pups, pg3200.txt +purblind pg100.txt +purchas'd pg100.txt +purchase pg31100.txt, pg3200.txt, pg100.txt +purchase, pg3200.txt, pg100.txt +purchase. pg31100.txt, pg3200.txt, pg100.txt +purchase." pg31100.txt +purchase; pg31100.txt +purchase?" pg3200.txt +purchased pg31100.txt, pg3200.txt +purchasers pg3200.txt +purchasers, pg3200.txt +purchases pg3200.txt +purchaseth. pg100.txt +purchasing pg31100.txt, pg3200.txt +purchasing, pg3200.txt +purchasing. pg100.txt +pure pg31100.txt, pg3200.txt, pg100.txt +pure, pg3200.txt, pg100.txt +pure. pg3200.txt, pg100.txt +pure." pg3200.txt +pure; pg3200.txt, pg100.txt +pure? pg100.txt +pure?" pg3200.txt +purely pg3200.txt +purely, pg3200.txt +purer pg3200.txt +purest pg3200.txt +purest, pg3200.txt +purfled pg3200.txt +purgation, pg100.txt +purgation. pg100.txt +purgatory pg3200.txt +purgatory. pg3200.txt +purge pg100.txt +purge, pg100.txt +purge. pg100.txt +purification pg3200.txt +purification. pg3200.txt +purified pg3200.txt +purified. pg3200.txt +purifying pg3200.txt +purifying. pg3200.txt +puritan. pg100.txt +purities, pg3200.txt +purity pg31100.txt, pg3200.txt, pg100.txt +purity! pg100.txt +purity, pg3200.txt, pg100.txt +purity. pg3200.txt +purloined." pg3200.txt +purple pg31100.txt, pg3200.txt +purple, pg3200.txt +purple-plumed pg3200.txt +purple. pg3200.txt +purple; pg100.txt +purples pg3200.txt +purples, pg100.txt +purport pg31100.txt, pg3200.txt, pg100.txt +purport. pg3200.txt +purports pg3200.txt +purpos'd pg100.txt +purpos'd, pg100.txt +purpos'd. pg100.txt +purpose pg31100.txt, pg3200.txt, pg100.txt +purpose! pg100.txt +purpose!" pg3200.txt +purpose, pg31100.txt, pg3200.txt, pg100.txt +purpose- pg100.txt +purpose. pg31100.txt, pg3200.txt, pg100.txt +purpose." pg31100.txt, pg3200.txt +purpose.' pg100.txt +purpose: pg31100.txt, pg100.txt +purpose; pg3200.txt, pg100.txt +purpose? pg31100.txt, pg100.txt +purpose?" pg3200.txt +purposed pg100.txt +purposed! pg100.txt +purposed, pg100.txt +purposely pg31100.txt, pg3200.txt, pg100.txt +purposely, pg3200.txt +purposes pg31100.txt, pg3200.txt, pg100.txt +purposes! pg100.txt +purposes, pg3200.txt, pg100.txt +purposes- pg100.txt +purposes. pg3200.txt, pg100.txt +purposes; pg100.txt +purposeth, pg100.txt +purposing pg3200.txt +purred pg3200.txt +purse pg100.txt +purse! pg100.txt +purse, pg31100.txt, pg3200.txt, pg100.txt +purse-taking. pg100.txt +purse. pg3200.txt, pg100.txt +purse; pg100.txt +purse? pg100.txt +purse] pg100.txt +pursed pg3200.txt +purser--the pg3200.txt +purses pg3200.txt, pg100.txt +purses. pg100.txt +purses; pg100.txt +purses? pg100.txt +pursue pg31100.txt, pg3200.txt, pg100.txt +pursue, pg100.txt +pursued pg31100.txt, pg3200.txt +pursued. pg3200.txt +pursued? pg100.txt +pursuer, pg3200.txt +pursues pg3200.txt, pg100.txt +pursues.' pg100.txt +pursues; pg100.txt +pursueth pg100.txt +pursuing pg31100.txt, pg3200.txt +pursuit pg31100.txt, pg3200.txt, pg100.txt +pursuit, pg31100.txt, pg100.txt +pursuit," pg3200.txt +pursuit. pg3200.txt, pg100.txt +pursuit? pg100.txt +pursuits pg31100.txt +pursuits, pg31100.txt +pursuits. pg31100.txt, pg3200.txt +pursuivant pg100.txt +pursuivant, pg100.txt +pursuivant-at-arms pg100.txt +pursuivants, pg100.txt +purus, pg100.txt +push pg31100.txt, pg3200.txt, pg100.txt +push.- pg100.txt +pushed pg3200.txt +pushed--ought pg3200.txt +pushes pg3200.txt +pushing pg31100.txt, pg3200.txt +pushing, pg31100.txt +pusillanimous pg3200.txt +puss." pg3200.txt +put pg31100.txt, pg3200.txt, pg100.txt +put'st pg100.txt +put-off. 
pg31100.txt +put-to pg31100.txt +put-up pg3200.txt +put." pg31100.txt +putnam's pg3200.txt +putney, pg31100.txt +putney." pg31100.txt +putrefaction--pictures pg3200.txt +putrid pg31100.txt +puts pg3200.txt, pg100.txt +putter-on pg100.txt +putter. pg100.txt +puttin' pg3200.txt +putting pg31100.txt, pg3200.txt +puttock. pg100.txt +putty. pg3200.txt +putzel pg3200.txt +putzel, pg3200.txt +puz--" pg3200.txt +puzzle pg31100.txt, pg3200.txt +puzzle, pg31100.txt, pg3200.txt +puzzle. pg31100.txt, pg3200.txt +puzzle." pg31100.txt, pg3200.txt +puzzled pg31100.txt, pg3200.txt +puzzled, pg3200.txt +puzzled. pg31100.txt +puzzlesome pg3200.txt +puzzling pg3200.txt +puzzling. pg3200.txt +pyramid pg3200.txt +pyramid, pg3200.txt +pyramid. pg3200.txt +pyramid; pg100.txt +pyramids pg3200.txt +pyramids. pg3200.txt +pyramises pg100.txt +pyramus pg100.txt +pyramus, pg100.txt +pyramus. pg100.txt +pyramus; pg100.txt +pyrrhus pg100.txt +pyrrhus: pg100.txt +pythagoras pg100.txt +qu'aucune pg3200.txt +qu'elle pg3200.txt +qu--" pg3200.txt +quack, pg3200.txt +quad. pg3200.txt +quadrangle, pg31100.txt, pg100.txt +quadrille pg3200.txt +quadroons pg3200.txt +quadruped pg3200.txt +quadruple pg3200.txt +quadrupled, pg3200.txt +quagmire pg3200.txt +quail pg100.txt +quailed, pg3200.txt +quails pg3200.txt, pg100.txt +quaint pg31100.txt, pg3200.txt, pg100.txt +quaint, pg31100.txt, pg3200.txt +quaintest pg3200.txt +quaintly pg3200.txt, pg100.txt +quaintness pg3200.txt +quaintnesses, pg3200.txt +quake pg3200.txt +quake, pg100.txt +quake. pg3200.txt, pg100.txt +quaked, pg3200.txt +quaker pg3200.txt +quaker. pg3200.txt +quakers, pg3200.txt +quakes pg3200.txt +quakes. pg100.txt +quaking pg3200.txt +qualification pg3200.txt, pg100.txt +qualification. pg3200.txt +qualification." pg31100.txt, pg3200.txt +qualifications pg31100.txt, pg3200.txt +qualifications, pg31100.txt +qualifications. pg31100.txt +qualified pg31100.txt, pg3200.txt +qualified. pg31100.txt, pg100.txt +qualify pg100.txt +qualify, pg100.txt +qualitat pg3200.txt +qualite. pg100.txt +qualities pg31100.txt, pg3200.txt, pg100.txt +qualities, pg31100.txt, pg3200.txt, pg100.txt +qualities--charity, pg3200.txt +qualities. pg3200.txt, pg100.txt +qualities; pg31100.txt, pg100.txt +quality pg31100.txt, pg3200.txt, pg100.txt +quality, pg31100.txt, pg3200.txt, pg100.txt +quality. pg3200.txt, pg100.txt +quality." pg3200.txt +quality: pg3200.txt +quality; pg100.txt +quality? pg100.txt +qualm. pg100.txt +qualmish, pg3200.txt +quand pg3200.txt +quantities pg3200.txt +quantities, pg3200.txt +quantity pg31100.txt, pg3200.txt +quantity, pg3200.txt, pg100.txt +quantity. pg31100.txt, pg100.txt +quantity." pg3200.txt +quantity?" pg3200.txt +quarantine pg3200.txt +quarantine-breaking pg3200.txt +quarantined pg3200.txt +quarrel pg31100.txt, pg3200.txt, pg100.txt +quarrel's pg100.txt +quarrel, pg3200.txt, pg100.txt +quarrel--that pg3200.txt +quarrel. pg3200.txt, pg100.txt +quarrel; pg100.txt +quarrel? pg100.txt +quarrel?" pg3200.txt +quarreled pg3200.txt +quarreled, pg3200.txt +quarreling pg3200.txt +quarreling, pg3200.txt +quarreling. pg3200.txt +quarrelings pg3200.txt +quarrell'd pg100.txt +quarrelled, pg3200.txt +quarrelled.-- pg31100.txt +quarrelling pg31100.txt +quarrelling! pg100.txt +quarrelling, pg3200.txt, pg100.txt +quarrels pg3200.txt, pg100.txt +quarrels. pg100.txt +quarrelsome. 
pg3200.txt +quarries pg3200.txt +quarry pg3200.txt, pg100.txt +quart pg3200.txt +quarter pg31100.txt, pg3200.txt, pg100.txt +quarter"--shriveled pg3200.txt +quarter'd pg100.txt +quarter'd; pg100.txt +quarter, pg31100.txt, pg3200.txt +quarter-acre pg3200.txt +quarter-boats pg3200.txt +quarter-boats. pg3200.txt +quarter-century pg3200.txt +quarter-civilization. pg3200.txt +quarter-deck. pg3200.txt +quarter-less--' pg3200.txt +quarter-less.' pg3200.txt +quarter-moon pg3200.txt +quarter-staff, pg3200.txt +quarter. pg31100.txt, pg3200.txt, pg100.txt +quarter." pg31100.txt, pg3200.txt +quarter.--all pg3200.txt +quarter:--robert pg31100.txt +quarter; pg3200.txt, pg100.txt +quartered pg31100.txt, pg3200.txt +quarterings pg3200.txt +quarterly pg3200.txt +quartermaster pg3200.txt +quartermaster's pg3200.txt +quartermaster, pg3200.txt +quarters pg3200.txt, pg100.txt +quarters, pg31100.txt, pg3200.txt +quarters. pg31100.txt, pg3200.txt +quarters." pg3200.txt +quarters; pg3200.txt +quarto pg3200.txt +quarto, pg3200.txt +quarts. pg100.txt +quartz pg3200.txt +quartz, pg3200.txt +quartz-mining pg3200.txt +quay. pg100.txt +que pg3200.txt, pg100.txt +quean! pg100.txt +queasy pg100.txt +queen pg31100.txt, pg3200.txt, pg100.txt +queen! pg100.txt +queen!' pg100.txt +queen'? pg100.txt +queen's pg3200.txt +queen's, pg100.txt +queen's." pg3200.txt +queen, pg3200.txt, pg100.txt +queen- pg100.txt +queen-' pg100.txt +queen-- pg3200.txt +queen. pg31100.txt, pg3200.txt, pg100.txt +queen." pg3200.txt +queen.] pg100.txt +queen: pg3200.txt, pg100.txt +queen; pg100.txt +queen? pg100.txt +queene pg3200.txt +queenes pg3200.txt +queens pg3200.txt +queens. pg3200.txt, pg100.txt +queens; pg100.txt +queens? pg100.txt +queensland pg3200.txt +queensland, pg3200.txt +queensland. pg3200.txt +queensland." pg3200.txt +queer pg3200.txt +queerest pg3200.txt +quell pg31100.txt, pg100.txt +quell. pg100.txt +quell? pg100.txt +quench pg100.txt +quench'd pg100.txt +quench. pg100.txt +quenched. pg3200.txt +queries pg3200.txt +quern, pg100.txt +quest pg31100.txt, pg3200.txt, pg100.txt +quest, pg3200.txt +quest. pg3200.txt +question pg31100.txt, pg3200.txt, pg100.txt +question! pg3200.txt, pg100.txt +question!" pg3200.txt +question" pg3200.txt +question'd pg100.txt +question, pg31100.txt, pg3200.txt, pg100.txt +question," pg31100.txt +question,-- pg3200.txt +question-- pg31100.txt, pg3200.txt +question--according pg3200.txt +question--and pg31100.txt +question--never pg3200.txt +question. pg31100.txt, pg3200.txt, pg100.txt +question." pg3200.txt +question.' pg3200.txt +question: pg31100.txt, pg3200.txt, pg100.txt +question; pg31100.txt, pg3200.txt +question? pg3200.txt, pg100.txt +question?" pg3200.txt +questionable pg3200.txt +questionable, pg3200.txt +questionable. pg3200.txt +questionable; pg31100.txt +questioned pg31100.txt, pg3200.txt +questioned, pg31100.txt +questioned. pg3200.txt, pg100.txt +questioned." pg3200.txt +questioned: pg3200.txt +questioner pg3200.txt +questioner. pg3200.txt +questioner." pg31100.txt +questioning pg3200.txt +questioning, pg100.txt +questionings pg3200.txt +questionings. pg3200.txt +questions pg31100.txt, pg3200.txt, pg100.txt +questions!" pg3200.txt +questions, pg31100.txt, pg3200.txt +questions-- pg3200.txt +questions--about pg31100.txt +questions. pg31100.txt, pg3200.txt, pg100.txt +questions." pg31100.txt, pg3200.txt +questions.) pg3200.txt +questions; pg31100.txt, pg100.txt +questions? 
pg100.txt +quests pg100.txt +qui pg3200.txt +qui's, pg100.txt +quibble pg3200.txt +quick pg31100.txt, pg3200.txt, pg100.txt +quick! pg31100.txt +quick!" pg3200.txt +quick, pg31100.txt, pg3200.txt, pg100.txt +quick- pg100.txt +quick--and pg31100.txt +quick--come pg3200.txt +quick-sighted, pg31100.txt +quick-silver. pg100.txt +quick. pg31100.txt, pg3200.txt, pg100.txt +quick." pg3200.txt +quick?' pg3200.txt +quicken pg3200.txt, pg100.txt +quickened pg31100.txt, pg3200.txt +quicker pg3200.txt +quicker, pg3200.txt +quicker. pg3200.txt +quickest pg3200.txt +quickest. pg31100.txt, pg3200.txt +quickly pg31100.txt, pg3200.txt, pg100.txt +quickly! pg31100.txt, pg100.txt +quickly, pg3200.txt, pg100.txt +quickly," pg31100.txt +quickly,' pg100.txt +quickly--"but pg31100.txt +quickly--he pg31100.txt +quickly. pg3200.txt, pg100.txt +quickly." pg3200.txt +quickly: pg3200.txt +quickly; pg3200.txt, pg100.txt +quickness pg31100.txt, pg3200.txt +quickness, pg31100.txt +quicksand. pg3200.txt +quicksilver pg3200.txt +quicksilver, pg3200.txt +quicksilver. pg3200.txt +quid, pg3200.txt +quiescent. pg3200.txt +quiescent; pg3200.txt +quiet pg31100.txt, pg3200.txt, pg100.txt +quiet! pg31100.txt, pg3200.txt +quiet, pg31100.txt, pg3200.txt, pg100.txt +quiet--even pg3200.txt +quiet--never pg31100.txt +quiet--whew! pg3200.txt +quiet-looking pg31100.txt +quiet. pg31100.txt, pg3200.txt, pg100.txt +quiet." pg31100.txt, pg3200.txt +quiet.) pg3200.txt +quiet; pg31100.txt, pg3200.txt, pg100.txt +quiet? pg3200.txt +quieted pg31100.txt, pg3200.txt +quieter pg31100.txt, pg3200.txt +quieter, pg31100.txt +quieter. pg3200.txt, pg100.txt +quietly pg31100.txt, pg3200.txt, pg100.txt +quietly. pg3200.txt, pg100.txt +quietly: pg3200.txt +quietness pg31100.txt +quietness. pg3200.txt +quietness; pg100.txt +quietus pg100.txt +quiggle pg3200.txt +quill, pg100.txt +quill. pg3200.txt, pg100.txt +quills, pg3200.txt +quilt? pg100.txt +quimby pg3200.txt +quince pg100.txt +quince! pg100.txt +quince. pg100.txt +quincy pg3200.txt +quincy.' pg3200.txt +quintessence pg3200.txt +quintus pg100.txt +quip pg100.txt +quips pg3200.txt, pg100.txt +quips, pg100.txt +quirk. pg100.txt +quit pg31100.txt, pg3200.txt, pg100.txt +quit, pg3200.txt +quit. pg3200.txt, pg100.txt +quit.' pg3200.txt +quite pg31100.txt, pg3200.txt, pg100.txt +quite, pg100.txt +quite. pg3200.txt, pg100.txt +quite." pg3200.txt +quits pg31100.txt +quittance. pg100.txt +quitted pg31100.txt, pg100.txt +quitting pg31100.txt, pg3200.txt +quiver pg100.txt +quiver, pg3200.txt +quivered pg3200.txt +quivered. pg3200.txt +quivered; pg3200.txt +quivering pg3200.txt +quivering, pg3200.txt +quivers pg3200.txt +quixote," pg3200.txt +quixotic pg3200.txt +quiz pg31100.txt +quo. pg100.txt +quoint- pg100.txt +quondam pg100.txt +quorum. pg3200.txt +quota pg31100.txt +quotation pg3200.txt +quotation, pg3200.txt +quotation. pg3200.txt +quotations pg31100.txt, pg3200.txt +quote pg3200.txt +quoted pg31100.txt, pg3200.txt +quoted, pg3200.txt +quoted. pg3200.txt, pg100.txt +quotes pg3200.txt +quoth pg100.txt +r'ecompens'ee pg3200.txt +r,'s pg3200.txt +r-o-w, pg3200.txt +r-ramming pg3200.txt +r.] pg3200.txt +r.n., pg3200.txt +raad pg3200.txt +rab pg3200.txt +rab's pg3200.txt +rabateau, pg3200.txt +rabbi pg3200.txt +rabbit pg3200.txt, pg100.txt +rabbit! pg3200.txt +rabbit!" pg3200.txt +rabbit, pg3200.txt +rabbit--limbs pg3200.txt +rabbit; pg100.txt +rabbits pg3200.txt +rabbits. pg3200.txt +rabble pg3200.txt, pg100.txt +rabble, pg100.txt +rabble- pg100.txt +rabble. 
pg3200.txt +rabblement pg100.txt +rabelais pg3200.txt +rabelais, pg3200.txt +rabid pg3200.txt +raccoon, pg3200.txt +race pg3200.txt +race! pg3200.txt +race's pg3200.txt +race, pg3200.txt, pg100.txt +race," pg3200.txt +race--and pg3200.txt +race--including pg3200.txt +race-ground pg3200.txt +race-track pg3200.txt +race. pg31100.txt, pg3200.txt, pg100.txt +race." pg3200.txt +race; pg3200.txt +race?" pg3200.txt +raced pg3200.txt +racer, pg3200.txt +racers pg3200.txt +racers. pg3200.txt +races pg3200.txt +races" pg3200.txt +races, pg3200.txt +races. pg31100.txt, pg3200.txt +rachael, pg3200.txt +racing pg3200.txt +rack pg3200.txt +rack'd; pg100.txt +rack, pg31100.txt, pg3200.txt, pg100.txt +rack- pg3200.txt +rack-heap pg3200.txt +rack. pg100.txt +racked pg3200.txt +rackers pg100.txt +racket pg3200.txt +racket, pg3200.txt +racket. pg3200.txt +racks, pg3200.txt +radcliffe's pg31100.txt +radcliffe's; pg31100.txt +radiance pg3200.txt +radiant pg3200.txt +radiant, pg3200.txt +radiate pg3200.txt +radically, pg3200.txt +radish! pg100.txt +radish, pg100.txt +radishes. pg3200.txt +raf' pg3200.txt +raf'." pg3200.txt +raffle pg3200.txt +raft pg3200.txt +raft!" pg3200.txt +raft, pg3200.txt +raft--" pg3200.txt +raft--and pg3200.txt +raft. pg3200.txt +raft; pg3200.txt +raft? pg100.txt +raft?" pg3200.txt +rafters pg3200.txt +rafting. pg3200.txt +rafts pg3200.txt +raftsman pg3200.txt +raftsmen pg3200.txt +rag pg3200.txt +rag!" pg3200.txt +rag, pg3200.txt, pg100.txt +rag-tag pg3200.txt +rag. pg3200.txt +rage pg31100.txt, pg3200.txt, pg100.txt +rage! pg100.txt +rage, pg3200.txt, pg100.txt +rage. pg3200.txt, pg100.txt +rage.' pg100.txt +rage; pg3200.txt, pg100.txt +raged pg3200.txt +raged, pg3200.txt +rages pg3200.txt, pg100.txt +rages, pg100.txt +rages; pg100.txt +ragged pg3200.txt +ragged, pg3200.txt +ragged--" pg3200.txt +ragged-edged pg3200.txt +ragged. pg3200.txt +raggedness pg3200.txt +raggedness!" pg3200.txt +raging pg3200.txt, pg100.txt +raging, pg3200.txt +ragings pg3200.txt +rags pg3200.txt, pg100.txt +rags!" pg3200.txt +rags, pg3200.txt, pg100.txt +rags. pg3200.txt +rags? pg100.txt +ragtown. pg3200.txt +raid pg3200.txt +raid, pg3200.txt +raid. pg3200.txt +raider, pg3200.txt +raiding pg3200.txt +raids. pg3200.txt +rail pg31100.txt, pg3200.txt, pg100.txt +rail'd pg100.txt +rail, pg3200.txt +rail--" pg3200.txt +rail. pg3200.txt, pg100.txt +rail.' pg3200.txt +rail: pg3200.txt +rail; pg3200.txt, pg100.txt +railed pg3200.txt +railing pg3200.txt +railing, pg100.txt +railing? pg100.txt +railings pg3200.txt +railings, pg3200.txt +railings. pg3200.txt +railleries, pg3200.txt +raillery pg31100.txt +railroad pg3200.txt +railroad, pg3200.txt +railroad----" pg3200.txt +railroad. pg3200.txt +railroad." pg3200.txt +railroad?" pg3200.txt +railroading pg3200.txt +railroads pg3200.txt +rails pg3200.txt, pg100.txt +rails, pg100.txt +railway pg3200.txt +railway! pg3200.txt +railway, pg3200.txt +railway-station pg3200.txt +railway-track. pg3200.txt +railway-train; pg3200.txt +railway. pg3200.txt +railway; pg3200.txt +railways pg3200.txt +railways, pg3200.txt +railways; pg3200.txt +raiment pg3200.txt, pg100.txt +raiment, pg3200.txt +rain pg31100.txt, pg3200.txt, pg100.txt +rain! pg100.txt +rain!" pg3200.txt +rain'd pg100.txt +rain, pg31100.txt, pg3200.txt, pg100.txt +rain- pg100.txt +rain-- pg3200.txt +rain--and pg3200.txt +rain-storm pg3200.txt +rain-water. pg3200.txt +rain. pg31100.txt, pg3200.txt, pg100.txt +rain.--"i pg31100.txt +rain? 
pg100.txt +rainbow pg3200.txt +rainbow, pg3200.txt +rainbow-radiance pg3200.txt +rainbow; pg3200.txt, pg100.txt +rainbowed, pg3200.txt +rainbows pg3200.txt +rained pg31100.txt, pg3200.txt +rained--the pg3200.txt +rainfall pg3200.txt +rainfall. pg3200.txt +rainguesson pg3200.txt +rainguesson!" pg3200.txt +rainguesson, pg3200.txt +rainguesson," pg3200.txt +rainguesson." pg3200.txt +raining pg3200.txt +raining, pg3200.txt +rains pg31100.txt, pg3200.txt +rains. pg3200.txt +rains." pg31100.txt +rainy. pg3200.txt +rair pg3200.txt +rairing pg3200.txt +rais'd pg100.txt +rais'd, pg100.txt +raise pg31100.txt, pg3200.txt, pg100.txt +raise, pg31100.txt, pg3200.txt +raise--twenty pg3200.txt +raise. pg31100.txt, pg100.txt +raise." pg3200.txt +raise; pg100.txt +raised pg31100.txt, pg3200.txt, pg100.txt +raised, pg31100.txt +raised. pg3200.txt +raised." pg3200.txt +raises pg31100.txt, pg3200.txt +raises." pg31100.txt +raish pg3200.txt +raisin, pg3200.txt +raising pg31100.txt, pg3200.txt +raising, pg100.txt +raisins pg3200.txt +rajah pg3200.txt +rak'd, pg100.txt +rake pg3200.txt, pg100.txt +rake, pg3200.txt +raked pg3200.txt +raking. pg3200.txt +rallied pg3200.txt +rallied, pg3200.txt +ralls pg3200.txt +ralls. pg3200.txt +rally pg3200.txt +ralph. pg3200.txt, pg100.txt +ram pg3200.txt, pg100.txt +ram, pg3200.txt, pg100.txt +ram- pg3200.txt +ram. pg3200.txt +ram? pg3200.txt +ramble. pg31100.txt +rambler. pg31100.txt +rambles pg3200.txt +rambles. pg31100.txt +rambling, pg3200.txt +rambling--" pg3200.txt +rambures pg100.txt +rambures; pg100.txt +ramlah pg3200.txt +ramleh, pg3200.txt +ramm'd pg100.txt +rampages--all pg3200.txt +rampantly pg3200.txt +rampart pg3200.txt +rampart. pg3200.txt +ramparts pg31100.txt, pg3200.txt +ramparts. pg31100.txt +ramps, pg100.txt +rams pg100.txt +rams; pg100.txt +rams? pg100.txt +ramsgate." pg31100.txt, pg3200.txt +ramston, pg100.txt +ran pg31100.txt, pg3200.txt, pg100.txt +ran, pg31100.txt +ran. pg31100.txt, pg3200.txt +rana--but pg3200.txt +ranch pg3200.txt +ranch! pg3200.txt +ranch. pg3200.txt +ranching pg3200.txt +ranchmen pg3200.txt +randalls pg31100.txt +randalls, pg31100.txt +randalls. pg31100.txt +randalls." pg31100.txt +randalls; pg31100.txt +randalls?" pg31100.txt +random pg3200.txt +random, pg31100.txt, pg3200.txt +random," pg3200.txt +random--" pg3200.txt +random--all pg3200.txt +random: pg3200.txt +rang pg3200.txt +rang'd; pg100.txt +rang, pg3200.txt +rang. pg3200.txt +rang; pg3200.txt +range pg31100.txt, pg3200.txt, pg100.txt +range. pg3200.txt +range; pg31100.txt +ranged pg3200.txt +ranged, pg100.txt +ranger-saddle. pg3200.txt +rangers pg3200.txt +rangers, pg3200.txt +ranges pg3200.txt, pg100.txt +ranging pg3200.txt +ranging, pg100.txt +rangiriri pg3200.txt +rank pg31100.txt, pg3200.txt, pg100.txt +rank! pg100.txt +rank'd, pg100.txt +rank, pg31100.txt, pg3200.txt, pg100.txt +rank- pg100.txt +rank. pg3200.txt, pg100.txt +rank." pg31100.txt, pg3200.txt +rank; pg3200.txt +rank?" pg3200.txt +ranked pg31100.txt, pg3200.txt +ranking pg3200.txt, pg100.txt +rankle pg100.txt +rankles. pg3200.txt +rankness pg3200.txt +ranks pg3200.txt, pg100.txt +ranks, pg31100.txt, pg100.txt +ranks. pg3200.txt +ransack'd pg100.txt +ransacked pg3200.txt +ransacking pg3200.txt +ransacking, pg3200.txt +ransom pg3200.txt, pg100.txt +ransom'd. pg100.txt +ransom, pg3200.txt, pg100.txt +ransom. pg100.txt +ransom." pg3200.txt +ransom; pg3200.txt, pg100.txt +ransom? pg100.txt +ransom?" pg3200.txt +ransomed, pg3200.txt +ransomed. pg100.txt +ransomed." pg3200.txt +rant pg31100.txt +rants! 
pg31100.txt +rao pg3200.txt +rap pg31100.txt, pg3200.txt +rap. pg31100.txt +rapacity, pg3200.txt +rapacity; pg3200.txt +rape pg3200.txt, pg100.txt +rape, pg100.txt +rape- pg100.txt +rape. pg100.txt +rape; pg100.txt +rapes pg100.txt +rapes. pg100.txt +raphael pg3200.txt +raphael, pg3200.txt +raphael." pg3200.txt +rapid pg31100.txt, pg3200.txt +rapid, pg3200.txt +rapidity pg31100.txt, pg3200.txt +rapidity. pg31100.txt, pg3200.txt +rapidity." pg31100.txt +rapidly pg31100.txt, pg3200.txt +rapidly, pg3200.txt +rapidly. pg3200.txt +rapids--how pg3200.txt +rapier! pg100.txt +rapier, pg3200.txt +rapier. pg100.txt +rapine pg3200.txt +rapscallions pg3200.txt +rapscallions." pg3200.txt +rapt pg3200.txt +rapt, pg3200.txt +rapt. pg100.txt +rapture pg3200.txt +rapture, pg31100.txt, pg3200.txt +rapture. pg3200.txt +rapture." pg3200.txt +rapture: pg3200.txt +raptures pg31100.txt, pg100.txt +raptures, pg3200.txt +raptures. pg31100.txt +rapturous pg31100.txt, pg3200.txt +rare pg31100.txt, pg3200.txt, pg100.txt +rare, pg3200.txt, pg100.txt +rare- pg100.txt +rare--in pg3200.txt +rare. pg3200.txt, pg100.txt +rare; pg100.txt +rarely pg3200.txt +rarely! pg100.txt +rarely? pg100.txt +rarer, pg100.txt +rarer. pg3200.txt +rarest pg3200.txt, pg100.txt +rarest. pg3200.txt +rarities pg3200.txt +rarity pg100.txt +rascal pg3200.txt, pg100.txt +rascal! pg100.txt +rascal, pg3200.txt, pg100.txt +rascal. pg100.txt +rascal; pg100.txt +rascal? pg100.txt +rascal?" pg3200.txt +rascality pg3200.txt +rascally pg100.txt +rascals pg3200.txt, pg100.txt +rascals! pg3200.txt, pg100.txt +rascals!' pg3200.txt +rascals. pg3200.txt, pg100.txt +rascals? pg100.txt +rasch. pg3200.txt +raschid, pg3200.txt +rash pg31100.txt, pg3200.txt +rash, pg100.txt +rash; pg3200.txt, pg100.txt +rash? pg100.txt +rashanali, pg3200.txt +rasher pg100.txt +rashly pg3200.txt +rashly- pg100.txt +rashness, pg100.txt +rashness. pg100.txt +rasp, pg3200.txt +rasped pg3200.txt +rasping pg3200.txt +raspy pg3200.txt +rat pg3200.txt +rat! pg3200.txt +rat!' pg100.txt +rat, pg3200.txt, pg100.txt +rat-catcher; pg3200.txt +rat-holes, pg3200.txt +rat-holes." pg3200.txt +rat-trap pg3200.txt +rat. pg3200.txt +rat.' pg100.txt +rat? pg3200.txt +ratcliff pg100.txt +ratcliff! pg100.txt +ratcliff, pg100.txt +rate pg31100.txt, pg3200.txt, pg100.txt +rate! pg100.txt +rate, pg31100.txt, pg3200.txt, pg100.txt +rate,) pg3200.txt +rate- pg100.txt +rate--celebration pg3200.txt +rate--men pg3200.txt +rate. pg3200.txt, pg100.txt +rate." pg31100.txt +rate: pg3200.txt +rate; pg31100.txt, pg100.txt +rated pg31100.txt, pg100.txt +rates pg3200.txt, pg100.txt +rates, pg3200.txt +rates. pg3200.txt +rates: pg3200.txt +rather pg31100.txt, pg3200.txt, pg100.txt +rather, pg31100.txt, pg3200.txt, pg100.txt +rather- pg100.txt +rather. pg100.txt +rather; pg100.txt +rather? pg100.txt +rather?--i pg31100.txt +ratherest pg100.txt +ratification. pg3200.txt +ratified pg100.txt +ratified; pg100.txt +ratify pg3200.txt +ratify, pg100.txt +ratio pg3200.txt +ratio, pg3200.txt +ration pg3200.txt +rational pg31100.txt, pg3200.txt, pg100.txt +rational, pg31100.txt +rational. pg31100.txt, pg3200.txt +rational." pg3200.txt +rational; pg3200.txt +rationalists, pg3200.txt +rationality pg31100.txt +rationality, pg31100.txt +rationally pg31100.txt, pg3200.txt +rationally, pg3200.txt +rationally. pg31100.txt, pg3200.txt +rations pg3200.txt +rats pg3200.txt, pg100.txt +rats, pg3200.txt +rats. pg100.txt +rats." pg3200.txt +rats?" pg3200.txt +rattain pg3200.txt +ratter." pg3200.txt +rattle pg3200.txt +rattle! 
pg31100.txt +rattle-trap, pg3200.txt +rattle. pg3200.txt +rattle; pg31100.txt +rattled, pg3200.txt +rattled. pg3200.txt +rattles. pg3200.txt +rattlesnake pg3200.txt +rattlesnake-skin; pg3200.txt +rattlesnake." pg3200.txt +rattling pg3200.txt +rattling, pg3200.txt +rattoons pg3200.txt +ratty pg3200.txt +rav'd, pg100.txt +rav'nous pg100.txt +ravaged pg3200.txt +rave pg31100.txt, pg3200.txt +rave? pg100.txt +raved pg3200.txt +raved, pg3200.txt +ravel pg100.txt +raven pg3200.txt +raven, pg100.txt +raven. pg3200.txt, pg100.txt +raven." pg3200.txt +ravenous pg3200.txt +ravenous. pg100.txt +ravenously pg3200.txt +ravens pg3200.txt, pg100.txt +ravens. pg3200.txt +ravenshaws? pg31100.txt +ravenspurgh, pg100.txt +ravenspurgh- pg100.txt +ravenspurgh. pg100.txt +ravenspurgh; pg100.txt +ravenstein pg3200.txt +ravenstein; pg3200.txt +raves pg100.txt +raves, pg100.txt +ravin pg100.txt +ravine pg3200.txt +ravine, pg3200.txt +ravine. pg3200.txt +ravines pg3200.txt +ravines--here pg3200.txt +raving pg3200.txt, pg100.txt +ravish'd pg100.txt +ravish'd! pg100.txt +ravished; pg100.txt +ravished? pg100.txt +ravisher. pg100.txt +ravishing pg3200.txt +ravishing, pg3200.txt +raw pg3200.txt, pg100.txt +raw, pg3200.txt, pg100.txt +raw. pg3200.txt, pg100.txt +rawhead-and- pg3200.txt +ray pg3200.txt +ray'd? pg100.txt +rayed pg3200.txt +raymond. pg3200.txt +raynauld pg3200.txt +rays pg31100.txt, pg3200.txt +rays, pg3200.txt +raz'd pg100.txt +raz'd. pg100.txt +raze pg100.txt +razor pg3200.txt +razor; pg3200.txt +razors pg3200.txt +re- pg3200.txt +re-affirms pg3200.txt +re-answer, pg100.txt +re-arranged. pg3200.txt +re-assembled pg31100.txt +re-awakened; pg3200.txt +re-deliver. pg100.txt +re-discovery pg3200.txt +re-dressing, pg3200.txt +re-edified. pg100.txt +re-edified; pg100.txt +re-elect pg3200.txt +re-election; pg3200.txt +re-entering pg31100.txt +re-established. pg31100.txt +re-establishing pg31100.txt +re-examine pg3200.txt +re-exchange." pg3200.txt +re-named--the pg3200.txt +re-rant pg31100.txt +re-re-re-re-re-reparentheses, pg3200.txt +re-reading pg3200.txt +re-restoration pg3200.txt +re-send; pg100.txt +re-simplifying pg3200.txt +re-stem pg100.txt +re-survey pg100.txt +re-urged--she pg31100.txt +re-writing pg3200.txt +reach pg31100.txt, pg3200.txt, pg100.txt +reach, pg31100.txt, pg3200.txt, pg100.txt +reach--like pg3200.txt +reach--she pg3200.txt +reach. pg31100.txt, pg3200.txt, pg100.txt +reach; pg31100.txt +reachable pg3200.txt +reached pg31100.txt, pg3200.txt +reached, pg3200.txt +reached--description pg3200.txt +reached--duels pg3200.txt +reached. pg31100.txt, pg3200.txt +reached; pg3200.txt +reaches pg3200.txt +reaching pg31100.txt, pg3200.txt +read pg31100.txt, pg3200.txt, pg100.txt +read! pg3200.txt, pg100.txt +read!" pg3200.txt +read!* pg100.txt +read, pg31100.txt, pg3200.txt, pg100.txt +read- pg100.txt +read--or pg31100.txt +read-up, pg3200.txt +read. pg31100.txt, pg3200.txt, pg100.txt +read." pg31100.txt, pg3200.txt +read: pg3200.txt, pg100.txt +read; pg31100.txt, pg3200.txt, pg100.txt +read? pg3200.txt, pg100.txt +read?" pg31100.txt, pg3200.txt +read]. pg3200.txt +readable. pg31100.txt +readable.' pg3200.txt +reader pg3200.txt, pg100.txt +reader's pg3200.txt +reader's-- pg3200.txt +reader, pg3200.txt +reader-- pg3200.txt +reader. pg3200.txt +reader.] pg3200.txt +reader: pg3200.txt +reader; pg3200.txt +readers pg3200.txt +readers, pg31100.txt +readers. pg3200.txt +readers." pg3200.txt +readers; pg3200.txt +readiest pg31100.txt +readily pg31100.txt, pg3200.txt +readily. 
pg31100.txt, pg3200.txt +readiness pg31100.txt, pg3200.txt, pg100.txt +readiness, pg3200.txt, pg100.txt +readiness. pg100.txt +readiness; pg31100.txt, pg100.txt +reading pg31100.txt, pg3200.txt, pg100.txt +reading! pg100.txt +reading, pg31100.txt, pg3200.txt, pg100.txt +reading-matter pg3200.txt +reading-matter. pg3200.txt +reading-matter; pg3200.txt +reading-tour-dreading pg3200.txt +reading-trip pg3200.txt +reading. pg31100.txt, pg100.txt +reading." pg31100.txt +reading: pg3200.txt +reading; pg3200.txt +reading? pg100.txt +reading?" pg31100.txt +readings pg3200.txt +readings, pg3200.txt +readings. pg3200.txt +readings." pg3200.txt +readins, pg100.txt +reads pg31100.txt, pg3200.txt, pg100.txt +reads, pg3200.txt +reads. pg3200.txt, pg100.txt +reads: pg3200.txt +ready pg31100.txt, pg3200.txt, pg100.txt +ready!" pg3200.txt +ready"; pg3200.txt +ready, pg31100.txt, pg3200.txt, pg100.txt +ready,' pg3200.txt +ready--and pg3200.txt +ready--sound pg3200.txt +ready. pg31100.txt, pg3200.txt, pg100.txt +ready." pg31100.txt, pg3200.txt +ready.' pg3200.txt +ready; pg3200.txt, pg100.txt +ready? pg3200.txt, pg100.txt +real pg31100.txt, pg3200.txt +real, pg31100.txt, pg3200.txt +real----" pg3200.txt +real-estate pg3200.txt +real. pg3200.txt +real." pg3200.txt +realise pg3200.txt +realise, pg3200.txt +realised pg31100.txt, pg3200.txt +realised. pg3200.txt +realism. pg3200.txt +realities pg3200.txt +realities, pg3200.txt +realities. pg3200.txt +reality pg3200.txt +reality! pg3200.txt +reality, pg3200.txt +reality. pg3200.txt +reality." pg31100.txt +realization pg3200.txt +realize pg3200.txt +realized pg3200.txt +realized, pg3200.txt +realizes pg3200.txt +realizing pg3200.txt +realizing, pg3200.txt +really pg31100.txt, pg3200.txt +really, pg31100.txt +really--if pg31100.txt +really. pg100.txt +really?" pg31100.txt, pg3200.txt +realm pg3200.txt, pg100.txt +realm! pg100.txt +realm, pg100.txt +realm. pg3200.txt, pg100.txt +realm." pg3200.txt +realm; pg100.txt +realms. pg100.txt +realms." pg3200.txt +reams pg3200.txt +reap pg3200.txt +reap'd pg100.txt +reap, pg100.txt +reaped pg3200.txt +reapers pg100.txt +reappearance pg3200.txt +reappeared, pg31100.txt, pg3200.txt +rear pg3200.txt, pg100.txt +rear'd pg100.txt +rear'd, pg100.txt +rear, pg3200.txt, pg100.txt +rear. pg3200.txt +rear? pg100.txt +reared pg3200.txt +reared, pg3200.txt +reared. pg3200.txt +rearguard pg3200.txt +rearing pg3200.txt +rearranged pg3200.txt +rearward pg3200.txt, pg100.txt +rearward, pg3200.txt +rearwards pg3200.txt +reason pg31100.txt, pg3200.txt, pg100.txt +reason! pg100.txt +reason'd? pg100.txt +reason, pg31100.txt, pg3200.txt, pg100.txt +reason- pg100.txt +reason--that pg3200.txt +reason. pg31100.txt, pg3200.txt, pg100.txt +reason." pg31100.txt, pg3200.txt +reason: pg3200.txt, pg100.txt +reason; pg3200.txt, pg100.txt +reason? pg3200.txt, pg100.txt +reason?" pg31100.txt, pg3200.txt +reasonable pg31100.txt, pg3200.txt, pg100.txt +reasonable, pg31100.txt, pg3200.txt, pg100.txt +reasonable. pg31100.txt, pg100.txt +reasonable; pg100.txt +reasonableness pg31100.txt +reasonably pg31100.txt, pg3200.txt +reasoned pg31100.txt, pg3200.txt +reasoning pg3200.txt +reasoning, pg3200.txt +reasoning. pg31100.txt +reasoning; pg31100.txt +reasonings pg31100.txt, pg3200.txt +reasonless. pg100.txt +reasons pg31100.txt, pg3200.txt, pg100.txt +reasons! pg100.txt +reasons!" pg31100.txt +reasons, pg3200.txt, pg100.txt +reasons--that pg3200.txt +reasons. pg31100.txt, pg3200.txt, pg100.txt +reasons: pg3200.txt, pg100.txt +reasons; pg3200.txt, pg100.txt +reasons? 
pg3200.txt +reasons?" pg3200.txt +reassembled, pg3200.txt +reassuring. pg3200.txt +reave pg100.txt +rebandaged, pg3200.txt +rebecca pg31100.txt +rebecca's pg31100.txt +rebecca, pg31100.txt, pg3200.txt +rebeck? pg100.txt +rebel pg31100.txt, pg3200.txt, pg100.txt +rebel! pg100.txt +rebel, pg3200.txt, pg100.txt +rebel-like, pg100.txt +rebel. pg3200.txt +rebel; pg3200.txt +rebell'd; pg100.txt +rebelled, pg3200.txt +rebellion pg3200.txt, pg100.txt +rebellion! pg100.txt +rebellion, pg100.txt +rebellion. pg100.txt +rebellion; pg3200.txt +rebels pg3200.txt, pg100.txt +rebels' pg100.txt +rebels. pg3200.txt, pg100.txt +rebels; pg3200.txt +rebels? pg100.txt +rebound, pg3200.txt +rebounds pg3200.txt +rebuilds pg3200.txt +rebuilt pg3200.txt +rebuilt. pg3200.txt +rebuke pg31100.txt, pg3200.txt, pg100.txt +rebuke, pg3200.txt, pg100.txt +rebuke. pg100.txt +rebuke; pg3200.txt +rebuke? pg3200.txt +rebukeable, pg100.txt +rebuked pg3200.txt +rebuked, pg100.txt +rebuked: pg3200.txt +rebuking pg3200.txt +recall pg31100.txt, pg3200.txt +recall'd pg100.txt +recall: pg31100.txt +recalled pg31100.txt, pg3200.txt +recalled, pg31100.txt +recalling pg31100.txt, pg3200.txt +recalls pg3200.txt +recant pg100.txt +recantation. pg100.txt +recanting pg31100.txt +recapitulate: pg3200.txt +recapitulation pg31100.txt +recapture. pg3200.txt +recaptured pg3200.txt +recede pg3200.txt +recede. pg3200.txt +receding pg3200.txt +receding, pg3200.txt +receipt pg31100.txt, pg3200.txt, pg100.txt +receipt, pg100.txt +receipt." pg3200.txt +receipts pg3200.txt +receipts, pg3200.txt +receiv'd pg100.txt +receiv'd, pg100.txt +receiv'd. pg100.txt +receiv- pg100.txt +receive pg31100.txt, pg3200.txt, pg100.txt +receive, pg3200.txt, pg100.txt +receive. pg31100.txt, pg3200.txt, pg100.txt +receive; pg31100.txt +receive? pg3200.txt, pg100.txt +received pg31100.txt, pg3200.txt, pg100.txt +received, pg31100.txt, pg3200.txt, pg100.txt +received--" pg31100.txt +received. pg31100.txt, pg3200.txt +received." pg3200.txt +received.' pg3200.txt +receiver pg3200.txt +receivers. pg3200.txt +receives pg3200.txt, pg100.txt +receives, pg100.txt +receives. pg100.txt +receivest pg100.txt +receivest, pg100.txt +receiving pg31100.txt, pg3200.txt, pg100.txt +receiving, pg31100.txt +receiving. pg3200.txt +recent pg31100.txt, pg3200.txt +recent, pg3200.txt +recent. pg3200.txt +recently pg31100.txt, pg3200.txt +recently." pg3200.txt +recently; pg3200.txt +receptacle pg100.txt +receptacle, pg100.txt +receptacles pg3200.txt +reception pg31100.txt, pg3200.txt +reception, pg31100.txt, pg3200.txt +reception-room. pg3200.txt +reception. pg31100.txt, pg3200.txt +reception." pg3200.txt +receptions pg3200.txt +receptive. pg3200.txt +recess pg3200.txt +recess, pg3200.txt +recesses pg3200.txt +recesses; pg3200.txt +recipient pg3200.txt +reciprocally- pg100.txt +reciprocate--" pg3200.txt +recital pg31100.txt, pg3200.txt +recital. pg31100.txt +recitals pg31100.txt +recitation, pg3200.txt +recitative--down pg3200.txt +recite pg3200.txt +recite, pg100.txt +recited pg3200.txt +reciter pg3200.txt +reciting pg3200.txt +reciting. pg3200.txt +reck'n pg3200.txt +reck'ning. pg100.txt +reck'ning? pg100.txt +reckless pg3200.txt, pg100.txt +reckless--choose pg3200.txt +reckless. pg3200.txt +recklessly pg3200.txt +recklessness pg3200.txt +reckon pg3200.txt, pg100.txt +reckon! pg3200.txt +reckon'd pg100.txt +reckon'd. pg100.txt +reckon, pg3200.txt +reckon--father's pg3200.txt +reckon--when pg3200.txt +reckon. pg3200.txt, pg100.txt +reckon." pg3200.txt +reckon.' 
pg3200.txt +reckon; pg3200.txt +reckon? pg3200.txt +reckon?" pg31100.txt, pg3200.txt +reckonable pg3200.txt +reckoned pg31100.txt, pg3200.txt +reckoned. pg3200.txt +reckoned." pg3200.txt +reckoning pg100.txt +reckoning! pg100.txt +reckoning, pg31100.txt +reckoning. pg3200.txt, pg100.txt +reckonings pg31100.txt +reckonings. pg3200.txt, pg100.txt +reclaim'd pg100.txt +reclaim'd. pg100.txt +reclaims, pg100.txt +recluse, pg3200.txt +recognisable pg3200.txt +recognise pg3200.txt +recognised pg31100.txt, pg3200.txt +recognition pg31100.txt, pg3200.txt +recognition, pg3200.txt +recognition--one pg3200.txt +recognition. pg3200.txt +recognition: pg3200.txt +recognizable pg3200.txt +recognizable, pg3200.txt +recognizable. pg3200.txt +recognizable." pg3200.txt +recognizances, pg100.txt +recognize pg3200.txt +recognize. pg3200.txt +recognized pg3200.txt +recognized, pg3200.txt +recognized. pg3200.txt +recognized; pg3200.txt +recognizes pg3200.txt +recognizing pg31100.txt, pg3200.txt +recoil pg100.txt +recoil, pg100.txt +recollect pg31100.txt, pg3200.txt +recollect, pg31100.txt +recollect. pg31100.txt, pg3200.txt +recollect." pg31100.txt +recollect; pg31100.txt +recollect?--harriet pg31100.txt +recollected pg31100.txt, pg3200.txt +recollected, pg31100.txt +recollecting pg31100.txt +recollection pg31100.txt, pg3200.txt +recollection, pg31100.txt, pg3200.txt +recollection-- pg31100.txt +recollection--that pg31100.txt +recollection. pg31100.txt, pg3200.txt +recollection." pg31100.txt +recollections pg31100.txt, pg3200.txt +recollections. pg31100.txt +recollections; pg31100.txt +recollects pg3200.txt +recomforture. pg100.txt +recommenced. pg31100.txt +recommenced: pg3200.txt +recommencing, pg31100.txt +recommend pg31100.txt, pg3200.txt +recommend?" pg3200.txt +recommendation pg31100.txt, pg3200.txt +recommendation, pg31100.txt +recommendation. pg31100.txt +recommendation?" pg31100.txt +recommendations pg31100.txt, pg3200.txt +recommendations--how pg3200.txt +recommendations. pg31100.txt +recommended pg31100.txt +recommended--" pg3200.txt +recommended. pg31100.txt, pg3200.txt +recommending pg31100.txt +recompens'd. pg100.txt +recompense pg100.txt +recompense, pg100.txt +recompense. pg100.txt +reconcil'd pg100.txt +reconcil'd. pg100.txt +reconcile pg31100.txt, pg3200.txt, pg100.txt +reconcile. pg100.txt +reconciled pg31100.txt, pg3200.txt, pg100.txt +reconciled, pg3200.txt +reconcilement pg100.txt +reconciles pg3200.txt +reconciliation pg3200.txt +reconciliation, pg31100.txt, pg3200.txt +reconciliation. pg3200.txt +reconciliation; pg31100.txt +reconnize pg3200.txt +reconsider pg31100.txt +reconsiders pg3200.txt +reconstruction. pg3200.txt +reconstructs pg3200.txt +record pg3200.txt, pg100.txt +record' pg3200.txt +record, pg3200.txt, pg100.txt +record," pg3200.txt +record. pg3200.txt, pg100.txt +record." pg3200.txt +record; pg3200.txt +recorded pg3200.txt, pg100.txt +recorded. pg3200.txt, pg100.txt +recorded." pg3200.txt +recorder's pg3200.txt +recorder's. pg3200.txt +recorder. pg3200.txt, pg100.txt +recorders! pg100.txt +recorders. pg100.txt +recording pg3200.txt +records pg31100.txt, pg3200.txt, pg100.txt +records, pg3200.txt, pg100.txt +records. pg3200.txt +records." pg3200.txt +recount pg100.txt +recounted, pg100.txt +recounting pg3200.txt, pg100.txt +recourse pg31100.txt +recover pg31100.txt, pg3200.txt, pg100.txt +recover'd pg100.txt +recover'd. pg100.txt +recover, pg3200.txt +recover-- pg31100.txt +recover. pg100.txt +recover.' pg3200.txt +recoverable. 
pg100.txt +recovered pg31100.txt, pg3200.txt +recovered, pg31100.txt +recovered. pg3200.txt, pg100.txt +recovered.' pg3200.txt +recovered.--how pg31100.txt +recovered? pg100.txt +recoveries, pg3200.txt +recovering pg31100.txt +recovering; pg3200.txt +recovers. pg100.txt +recovery pg31100.txt, pg100.txt +recovery, pg31100.txt, pg3200.txt +recovery,--wonder pg31100.txt +recovery. pg31100.txt, pg100.txt +recovery." pg31100.txt +recovery.' pg100.txt +recovery; pg100.txt +recovery? pg100.txt +recreant pg3200.txt +recreant! pg100.txt +recreant, pg100.txt +recreants! pg100.txt +recreating pg31100.txt +recreation pg3200.txt, pg100.txt +recreation, pg3200.txt, pg100.txt +recreation-seekers pg3200.txt +recreation. pg3200.txt, pg100.txt +recrossed pg3200.txt +recruit pg3200.txt +recruit, pg3200.txt +recruit--and pg3200.txt +recruiter pg3200.txt +recruiters. pg3200.txt +recruiting pg3200.txt +recruiting-camp pg3200.txt +recruits pg3200.txt +recruits. pg3200.txt +rectified pg3200.txt +rectified. pg31100.txt +rectify pg31100.txt, pg3200.txt, pg100.txt +rector, pg31100.txt +recuperate, pg3200.txt +recuperative pg3200.txt +recur pg3200.txt +recured, pg100.txt +recurred pg3200.txt +red pg31100.txt, pg3200.txt, pg100.txt +red, pg3200.txt, pg100.txt +red- pg3200.txt +red-and-gold pg3200.txt +red-capped pg3200.txt +red-haired, pg3200.txt +red-headed, pg3200.txt +red-hot pg3200.txt +red-hot, pg100.txt +red-light pg3200.txt +red-tiled pg3200.txt +red-vested pg3200.txt +red. pg3200.txt, pg100.txt +red; pg3200.txt +red? pg100.txt +redan, pg3200.txt +redbreast-teacher. pg100.txt +reddened pg3200.txt +reddening. pg3200.txt +redder, pg3200.txt +redding pg3200.txt +redding, pg3200.txt +redding: pg3200.txt +reddish pg3200.txt +rede. pg100.txt +redeem pg3200.txt, pg100.txt +redeem, pg100.txt +redeemed pg3200.txt +redeemer, pg3200.txt, pg100.txt +redeeming pg3200.txt +redemption pg3200.txt, pg100.txt +redemption! pg100.txt +redemption. pg100.txt +redemption; pg3200.txt +rediscovery pg3200.txt +redistribute pg31100.txt, pg3200.txt, pg100.txt +redistribution pg31100.txt, pg3200.txt +redistribution. pg31100.txt, pg3200.txt, pg100.txt +redoubled! pg100.txt +redoubled, pg100.txt +redpath pg3200.txt +redress pg100.txt +redress!" pg100.txt +redress'd, pg100.txt +redress'd; pg100.txt +redress, pg100.txt +redress- pg100.txt +redress. pg100.txt +redress; pg100.txt +redress? pg100.txt +redress?" pg31100.txt, pg3200.txt +redressed; pg100.txt +redresses. pg100.txt +reds pg3200.txt +reduce pg3200.txt +reduced pg31100.txt, pg3200.txt +reduced, pg3200.txt +reduced; pg31100.txt +reduces pg3200.txt +reduction pg31100.txt, pg3200.txt +reductions pg3200.txt +reductions. pg31100.txt +reductions; pg31100.txt +reed pg3200.txt +reed. pg3200.txt +reeder pg3200.txt +reeder, pg3200.txt +reeds pg100.txt +reeds. pg3200.txt +reef pg3200.txt +reef--the pg3200.txt +reef. pg3200.txt +reef.' pg3200.txt +reefing, pg3200.txt +reefs pg3200.txt +reefs, pg3200.txt +reek pg100.txt +reek, pg100.txt +reeking pg3200.txt +reeks. pg100.txt +reel pg3200.txt +reel, pg3200.txt +reel?" pg31100.txt +reeled pg3200.txt +reeling pg3200.txt +reels pg100.txt +reels, pg100.txt +reels. 
pg100.txt +reese pg3200.txt +reeving pg3200.txt +refall pg3200.txt +refer pg3200.txt +reference pg31100.txt, pg3200.txt, pg100.txt +reference; pg3200.txt +references pg3200.txt +referred pg31100.txt, pg3200.txt, pg100.txt +referring pg3200.txt +refers pg3200.txt +refers, pg3200.txt +refilled, pg3200.txt +refin'd pg100.txt +refine pg3200.txt +refine, pg3200.txt +refined pg31100.txt, pg3200.txt +refinement pg31100.txt +refinement, pg3200.txt +refinery, pg3200.txt +refines pg3200.txt +refining, pg3200.txt +reflect pg31100.txt, pg3200.txt, pg100.txt +reflect--now, pg3200.txt +reflected pg31100.txt, pg3200.txt +reflected, pg31100.txt, pg3200.txt +reflected. pg3200.txt +reflecting pg31100.txt, pg3200.txt +reflecting, pg3200.txt +reflection pg31100.txt, pg3200.txt, pg100.txt +reflection, pg31100.txt, pg3200.txt +reflection. pg31100.txt, pg3200.txt +reflection; pg31100.txt, pg100.txt +reflection?' pg3200.txt +reflections pg31100.txt, pg3200.txt +reflections. pg31100.txt +reflections." pg31100.txt +reflections; pg31100.txt +reflective pg3200.txt +reflectively pg3200.txt +reflectively, pg3200.txt +reflectively: pg3200.txt +reflectively; pg3200.txt +reflector. pg3200.txt +reflector? pg3200.txt +reflects pg3200.txt +reform pg3200.txt, pg100.txt +reform! pg3200.txt +reform!" pg3200.txt +reform'd; pg100.txt +reform). pg3200.txt +reform, pg3200.txt +reform. pg3200.txt +reform." pg3200.txt +reform.' pg3200.txt +reform; pg3200.txt +reformation. pg31100.txt, pg100.txt +reformed pg31100.txt, pg3200.txt +reformed, pg3200.txt +reformed. pg3200.txt +reformed." pg3200.txt +reformed?" pg3200.txt +reformer pg3200.txt +reformer, pg3200.txt +reformer. pg3200.txt +reformers pg3200.txt +reformers' pg3200.txt +reformers, pg3200.txt +reformers. pg3200.txt +reforming. pg3200.txt +reforms, pg3200.txt +reforms. pg3200.txt +refractory. pg100.txt +refrain pg31100.txt, pg3200.txt, pg100.txt +refrain, pg100.txt +refrain? pg3200.txt +refrained pg31100.txt, pg3200.txt +refrained. pg3200.txt +refrained: pg3200.txt +refresh pg31100.txt, pg3200.txt +refreshed pg3200.txt +refreshed, pg3200.txt +refreshed. pg3200.txt +refreshed." pg3200.txt +refreshing pg3200.txt +refreshing: pg3200.txt +refreshment pg31100.txt, pg3200.txt +refreshment. pg31100.txt, pg3200.txt +refreshment." pg31100.txt +refreshments pg31100.txt, pg3200.txt +refreshments. pg3200.txt +refrigerator. pg3200.txt +refrigerator; pg3200.txt +reft, pg100.txt +refuge pg3200.txt, pg100.txt +refuge, pg3200.txt +refuge. pg3200.txt, pg100.txt +refugee pg3200.txt +refund pg31100.txt, pg3200.txt, pg100.txt +refus'd pg100.txt +refus'd, pg100.txt +refusal pg31100.txt, pg3200.txt +refusal, pg31100.txt, pg3200.txt +refusal. pg31100.txt +refusals, pg3200.txt +refuse pg31100.txt, pg3200.txt, pg100.txt +refuse, pg100.txt +refuse. pg3200.txt +refuse." pg31100.txt, pg3200.txt +refuse; pg31100.txt +refuse?" pg3200.txt +refused pg3200.txt +refused, pg31100.txt, pg3200.txt, pg100.txt +refused. pg3200.txt +refused." pg31100.txt +refused; pg31100.txt, pg3200.txt +refused?" pg31100.txt +refuses pg3200.txt +refusest. pg100.txt +refusing pg31100.txt, pg3200.txt +refute pg3200.txt +regain pg31100.txt +regained pg31100.txt, pg3200.txt +regal pg3200.txt +regalest pg3200.txt +regalia pg3200.txt, pg100.txt +regan! pg100.txt +regan, pg100.txt +regan. pg100.txt +regan]. pg100.txt +regard pg31100.txt, pg3200.txt, pg100.txt +regard, pg31100.txt, pg3200.txt, pg100.txt +regard. pg31100.txt, pg3200.txt, pg100.txt +regard." pg31100.txt +regard; pg31100.txt +regard? 
pg31100.txt +regarded pg31100.txt, pg3200.txt, pg100.txt +regarded, pg31100.txt +regarded. pg31100.txt +regarded." pg31100.txt +regardful pg31100.txt +regardfully? pg100.txt +regarding pg31100.txt, pg3200.txt +regardless pg31100.txt, pg3200.txt +regards pg3200.txt, pg100.txt +regards. pg3200.txt +regem.' pg3200.txt +regen, pg3200.txt +regen." pg3200.txt +regenerate pg3200.txt +regenerate, pg100.txt +regens." pg3200.txt +regent pg100.txt +regentship. pg100.txt +regime. pg3200.txt +regiment pg31100.txt, pg3200.txt, pg100.txt +regiment, pg31100.txt, pg100.txt +regiment--a pg3200.txt +regiment--ballou pg3200.txt +regiment. pg31100.txt, pg100.txt +regiment." pg31100.txt, pg3200.txt +regiment; pg100.txt +regimentals." pg31100.txt +regiments pg3200.txt +regiments. pg100.txt +regina pg3200.txt +reginald pg31100.txt +reginald's pg31100.txt +reginald, pg31100.txt +region pg3200.txt, pg100.txt +region, pg3200.txt, pg100.txt +region-- pg3200.txt +region. pg3200.txt +region." pg31100.txt, pg3200.txt +region; pg3200.txt +region?" pg3200.txt +regions pg3200.txt, pg100.txt +regions! pg100.txt +regions, pg3200.txt +regions. pg3200.txt, pg100.txt +regions.' pg3200.txt +regist'red, pg100.txt +register pg100.txt +register, pg100.txt +register-quick!" pg3200.txt +register. pg3200.txt +register." pg3200.txt +register: pg3200.txt +registered pg3200.txt +registers pg3200.txt +registers! pg3200.txt +regle', pg3200.txt +regle. pg3200.txt +regreet pg100.txt +regreet? pg100.txt +regreets; pg100.txt +regress- pg100.txt +regret pg31100.txt, pg3200.txt +regret, pg31100.txt +regret--namely, pg3200.txt +regret. pg31100.txt, pg3200.txt +regret." pg31100.txt +regret; pg31100.txt +regretful, pg3200.txt +regretfully pg3200.txt +regretfully. pg3200.txt +regretfully: pg3200.txt +regrets pg31100.txt, pg3200.txt +regrets, pg31100.txt +regrets. pg3200.txt +regretted pg31100.txt, pg3200.txt +regretted. pg31100.txt, pg3200.txt +regretted.--the pg31100.txt +regretting pg31100.txt +regular pg31100.txt, pg3200.txt, pg100.txt +regular, pg31100.txt, pg3200.txt +regular. pg3200.txt +regular." pg3200.txt +regularity pg31100.txt, pg3200.txt +regularity; pg31100.txt +regularly pg31100.txt, pg3200.txt +regularly." pg3200.txt +regularly.' pg3200.txt +regulated, pg31100.txt +regulated. pg3200.txt +regulated." pg31100.txt +regulating pg31100.txt, pg3200.txt, pg100.txt +regulating, pg3200.txt +regulating--come pg3200.txt +regulation pg31100.txt, pg3200.txt +regulation." pg31100.txt +regulations pg3200.txt +regulator. pg3200.txt +rehabilitate pg3200.txt +rehabilitated pg3200.txt +rehabilitation. pg3200.txt +rehears'd- pg100.txt +rehears'd. pg100.txt +rehearsal pg31100.txt +rehearse, pg100.txt +rehearse. pg100.txt +rehearse: pg100.txt +rehearse; pg100.txt +rehearse? pg100.txt +reichsrath pg3200.txt +reichsrath. pg3200.txt +reid pg3200.txt +reign pg31100.txt, pg3200.txt, pg100.txt +reign'd. pg100.txt +reign) pg100.txt +reign, pg31100.txt, pg3200.txt, pg100.txt +reign. pg31100.txt, pg3200.txt, pg100.txt +reign; pg100.txt +reigned pg3200.txt +reigned! pg3200.txt +reigned, pg3200.txt, pg100.txt +reigned. pg3200.txt +reignier pg100.txt +reignier, pg100.txt +reigning-there. pg100.txt +reigns pg3200.txt, pg100.txt +reigns!" pg31100.txt +reigns, pg3200.txt +reigns. pg3200.txt, pg100.txt +reigns; pg100.txt +reimburse pg3200.txt +rein pg31100.txt, pg3200.txt, pg100.txt +rein: pg100.txt +reindeer pg3200.txt +reindorsed pg3200.txt +reined pg31100.txt +reinforced pg3200.txt +reinforced. 
pg3200.txt +reinforcement pg3200.txt +reinforcing pg3200.txt +reins pg3200.txt +reins, pg3200.txt +reinstate pg31100.txt +reinstated. pg3200.txt +reinvented pg3200.txt +reisen. pg3200.txt +reisetasche? pg3200.txt +reiterate pg3200.txt +reiterated, pg3200.txt +reject pg31100.txt, pg3200.txt, pg100.txt +rejected pg31100.txt, pg3200.txt +rejected, pg3200.txt +rejected. pg3200.txt +rejection; pg3200.txt +rejoibings! pg3200.txt +rejoice pg31100.txt, pg3200.txt, pg100.txt +rejoice! pg3200.txt +rejoice, pg100.txt +rejoiced pg31100.txt, pg3200.txt +rejoiced. pg31100.txt +rejoices pg3200.txt +rejoices.' pg3200.txt +rejoiceth pg100.txt +rejoicing pg31100.txt, pg3200.txt, pg100.txt +rejoicing, pg3200.txt +rejoicing. pg3200.txt +rejoicing? pg100.txt +rejoicings, pg3200.txt +rejoicings. pg3200.txt +rejoinder pg3200.txt +rejoinder, pg3200.txt +rejoined pg31100.txt, pg3200.txt +rejoined: pg3200.txt +rejuvenation pg3200.txt +rekonsyled pg3200.txt +relapse. pg3200.txt +relapse? pg3200.txt +relapsed. pg3200.txt +relapsing. pg3200.txt +relate pg31100.txt, pg3200.txt +relate! pg31100.txt +relate, pg31100.txt, pg100.txt +relate. pg100.txt +relate." pg3200.txt +relate? pg31100.txt +related pg31100.txt, pg3200.txt +related." pg31100.txt +related?" pg31100.txt +relates pg31100.txt +relating pg31100.txt +relation pg31100.txt, pg3200.txt, pg100.txt +relation, pg31100.txt +relation. pg31100.txt, pg3200.txt +relation; pg31100.txt +relation? pg31100.txt, pg100.txt +relations pg31100.txt, pg3200.txt, pg100.txt +relations! pg31100.txt +relations' pg31100.txt +relations, pg31100.txt, pg3200.txt +relations. pg31100.txt, pg3200.txt +relations." pg31100.txt +relations?' pg31100.txt +relationship pg31100.txt, pg3200.txt +relationship!--no pg31100.txt +relationship. pg3200.txt +relationship." pg31100.txt +relationship.' pg3200.txt +relationships pg3200.txt +relative pg31100.txt, pg3200.txt +relative, pg3200.txt +relative. pg3200.txt +relative.' pg3200.txt +relatively pg3200.txt +relatives pg3200.txt +relatives, pg3200.txt +relatives--or pg3200.txt +relatives. pg3200.txt +relaxation pg3200.txt +relaxation. pg3200.txt +relaxed pg3200.txt +relearning pg3200.txt +releas'd pg100.txt +releas'd? pg100.txt +release pg31100.txt, pg3200.txt, pg100.txt +release. pg3200.txt +released pg31100.txt, pg3200.txt +released, pg31100.txt +released. pg3200.txt +released; pg3200.txt +releases pg3200.txt +releasing: pg100.txt +relegate pg3200.txt +releif pg31100.txt +releive pg31100.txt +releiving pg31100.txt +relent pg100.txt +relent. pg3200.txt, pg100.txt +relent; pg100.txt +relent? pg100.txt +relented pg3200.txt +relenting pg31100.txt +relenting, pg31100.txt +relentless pg3200.txt +relevancy pg3200.txt +relevancy, pg3200.txt +relevancy." pg3200.txt +relevancy; pg3200.txt +relevant pg3200.txt +reliable pg3200.txt +reliable." pg3200.txt +reliable: pg3200.txt +reliance pg31100.txt, pg3200.txt +relic pg3200.txt +relic, pg3200.txt +relic-hunters pg3200.txt +relic-peddlers pg3200.txt +relic. pg3200.txt +relic." pg3200.txt +relics pg3200.txt, pg100.txt +relics! pg3200.txt +relics, pg3200.txt +relics. pg3200.txt +relics: pg3200.txt +relict. pg3200.txt +relied pg31100.txt, pg3200.txt +relief pg31100.txt, pg3200.txt, pg100.txt +relief! pg31100.txt, pg3200.txt +relief, pg31100.txt, pg3200.txt, pg100.txt +relief--and pg3200.txt +relief-map, pg3200.txt +relief. pg31100.txt, pg3200.txt, pg100.txt +relief." 
pg31100.txt +relief: pg3200.txt +relief; pg31100.txt +reliev'd, pg100.txt +relieve pg31100.txt, pg3200.txt, pg100.txt +relieve, pg31100.txt +relieved pg31100.txt, pg3200.txt +relieved, pg31100.txt +relieved. pg3200.txt +relieved." pg3200.txt +relieves pg31100.txt, pg3200.txt +relieving pg3200.txt +religieuse pg3200.txt +religion pg3200.txt, pg100.txt +religion"--after pg3200.txt +religion, pg31100.txt, pg3200.txt, pg100.txt +religion--arrival pg3200.txt +religion. pg3200.txt, pg100.txt +religion." pg3200.txt +religion; pg3200.txt +religion?" pg3200.txt +religion?' pg3200.txt +religionists. pg3200.txt +religions. pg3200.txt +religious pg3200.txt, pg100.txt +religious, pg3200.txt, pg100.txt +religious-wise. pg3200.txt +religious. pg31100.txt +religious; pg3200.txt, pg100.txt +religious? pg100.txt +religious?" pg3200.txt +relinquish pg3200.txt +relinquished, pg31100.txt +relinquishment. pg31100.txt +reliquit, pg100.txt +relish pg31100.txt, pg3200.txt, pg100.txt +relish, pg3200.txt +relish--" pg3200.txt +relish. pg3200.txt +relish: pg3200.txt +relished pg31100.txt +reloaded, pg3200.txt +reloads.] pg3200.txt +relocated!" pg3200.txt +reluctance pg31100.txt, pg3200.txt +reluctance, pg31100.txt, pg3200.txt +reluctance. pg31100.txt, pg3200.txt +reluctant pg31100.txt, pg3200.txt +reluctant, pg3200.txt +reluctantly. pg3200.txt +reluctantly: pg3200.txt +rely pg3200.txt +rely. pg100.txt +rely." pg3200.txt +relying pg31100.txt +remain pg31100.txt, pg3200.txt, pg100.txt +remain! pg100.txt +remain, pg3200.txt, pg100.txt +remain-- pg31100.txt +remain. pg31100.txt, pg3200.txt, pg100.txt +remain." pg3200.txt +remain; pg3200.txt, pg100.txt +remainder pg31100.txt, pg3200.txt +remainder. pg3200.txt +remainders pg3200.txt +remainders. pg3200.txt +remained pg31100.txt, pg3200.txt +remained, pg31100.txt, pg3200.txt +remained--he pg3200.txt +remained. pg3200.txt +remaining pg31100.txt, pg3200.txt, pg100.txt +remaining, pg100.txt +remaining--one pg3200.txt +remaining. pg3200.txt +remaining? pg100.txt +remains pg31100.txt, pg3200.txt, pg100.txt +remains! pg3200.txt +remains, pg3200.txt, pg100.txt +remains. pg3200.txt, pg100.txt +remains." pg31100.txt, pg3200.txt +remains; pg3200.txt, pg100.txt +remains? pg100.txt +remains?" pg3200.txt +remake pg3200.txt +remark pg31100.txt, pg3200.txt +remark, pg31100.txt, pg3200.txt +remark-- pg3200.txt +remark-two pg3200.txt +remark. pg31100.txt, pg3200.txt +remark: pg3200.txt +remark:-- pg31100.txt +remark; pg3200.txt +remark?" pg3200.txt +remarkable pg31100.txt, pg3200.txt, pg100.txt +remarkable, pg31100.txt, pg3200.txt +remarkable. pg31100.txt, pg3200.txt +remarkable.' pg3200.txt +remarkable?" pg3200.txt +remarkables, pg3200.txt +remarkably pg31100.txt, pg3200.txt +remarked pg3200.txt +remarked, pg3200.txt +remarked-- pg3200.txt +remarked. pg3200.txt +remarked: pg3200.txt +remarked; pg3200.txt +remarking pg3200.txt +remarking, pg3200.txt +remarks pg31100.txt, pg3200.txt +remarks" pg3200.txt +remarks, pg3200.txt +remarks. pg3200.txt +remarks." pg31100.txt +remarks: pg3200.txt +rembrandt pg3200.txt +remediate pg100.txt +remedied, pg3200.txt +remedied. pg100.txt +remedies pg31100.txt, pg3200.txt, pg100.txt +remedies, pg3200.txt +remedies. pg31100.txt, pg100.txt +remedy pg31100.txt, pg3200.txt, pg100.txt +remedy, pg3200.txt, pg100.txt +remedy. pg3200.txt, pg100.txt +remedy." pg3200.txt +remedy: pg3200.txt +remedy? pg100.txt +rememb'red pg100.txt +rememb'red, pg100.txt +rememb'red. 
pg100.txt +rememb'red; pg100.txt +rememb'rest pg100.txt +remember pg31100.txt, pg3200.txt, pg100.txt +remember!" pg3200.txt +remember, pg31100.txt, pg3200.txt, pg100.txt +remember- pg100.txt +remember--ah, pg3200.txt +remember--draw pg3200.txt +remember. pg3200.txt, pg100.txt +remember." pg31100.txt, pg3200.txt +remember: pg3200.txt +remember? pg100.txt +remembered pg31100.txt, pg3200.txt, pg100.txt +remembered, pg31100.txt, pg3200.txt, pg100.txt +remembered- pg100.txt +remembered. pg31100.txt, pg3200.txt +remembering pg31100.txt, pg3200.txt +remembering. pg3200.txt +remembers pg3200.txt, pg100.txt +remembrance pg31100.txt, pg3200.txt, pg100.txt +remembrance, pg31100.txt, pg3200.txt, pg100.txt +remembrance. pg31100.txt, pg3200.txt, pg100.txt +remembrance." pg31100.txt +remembrance: pg3200.txt +remembrance; pg100.txt +remembrance? pg100.txt +remembrancer pg3200.txt +remembrancer! pg100.txt +remembrancer?" pg3200.txt +remembrances pg31100.txt, pg3200.txt, pg100.txt +remembrances, pg31100.txt +remembrances. pg31100.txt, pg3200.txt, pg100.txt +remercimens; pg100.txt +remi, pg3200.txt +remi. pg3200.txt +remind pg31100.txt, pg3200.txt +reminded pg31100.txt, pg3200.txt +reminded. pg3200.txt +reminder pg3200.txt +reminders. pg3200.txt +reminding pg31100.txt, pg3200.txt +reminds pg31100.txt, pg3200.txt +reminiscence pg3200.txt +reminiscence, pg3200.txt +reminiscences pg3200.txt +reminiscences, pg3200.txt +reminiscent pg3200.txt +remise, pg3200.txt +remised, pg3200.txt +remiss, pg31100.txt, pg100.txt +remiss; pg31100.txt, pg3200.txt, pg100.txt +remission. pg100.txt +remit pg100.txt +remitted pg3200.txt +remitted." pg3200.txt +remnant pg3200.txt +remnant, pg3200.txt +remnant. pg3200.txt +remnant; pg100.txt +remnants pg3200.txt, pg100.txt +remonstrance pg31100.txt +remonstrance. pg31100.txt +remonstrated pg3200.txt +remorse pg3200.txt, pg100.txt +remorse, pg100.txt +remorse. pg31100.txt, pg3200.txt, pg100.txt +remorse." pg3200.txt +remorse; pg100.txt +remorse? pg100.txt +remorseful pg3200.txt +remorseful. pg3200.txt +remorseless pg3200.txt +remorseless. pg100.txt +remote pg3200.txt, pg100.txt +remote, pg3200.txt +remote. pg100.txt +remote; pg3200.txt +remotely pg3200.txt +remoteness pg31100.txt +remotenesses pg3200.txt +remoter pg3200.txt +remotest pg3200.txt +remov'd pg100.txt +remov'd, pg100.txt +remov'd. pg100.txt +remov'd; pg100.txt +removable pg3200.txt +removable; pg3200.txt +removal pg31100.txt +removal, pg31100.txt +removal." pg31100.txt +removal; pg31100.txt +removals pg31100.txt +remove pg31100.txt, pg3200.txt, pg100.txt +remove! pg100.txt +remove, pg31100.txt, pg100.txt +remove. pg31100.txt, pg100.txt +remove." pg31100.txt +remove; pg31100.txt +removed pg31100.txt, pg3200.txt, pg100.txt +removed, pg31100.txt, pg3200.txt, pg100.txt +removed. pg31100.txt, pg3200.txt, pg100.txt +removed." pg31100.txt +removed; pg31100.txt, pg3200.txt +removes pg3200.txt +removes. pg100.txt +removing pg31100.txt, pg100.txt +removing; pg31100.txt +remunerate. pg100.txt +remuneration pg3200.txt, pg100.txt +remuneration. pg100.txt +remuneration; pg100.txt +remuneration? pg100.txt +remus pg3200.txt +remy, pg3200.txt +renaissance pg3200.txt +renaissance. pg3200.txt +renaissance." pg3200.txt +renamed pg3200.txt +renamed. pg31100.txt, pg3200.txt, pg100.txt +rend pg3200.txt, pg100.txt +rend'red pg100.txt +render pg31100.txt, pg3200.txt, pg100.txt +render'd. pg100.txt +render, pg31100.txt, pg100.txt +render- pg100.txt +rendered pg31100.txt, pg3200.txt, pg100.txt +rendered. 
pg3200.txt, pg100.txt +rendering pg31100.txt, pg3200.txt +rendezvous pg31100.txt, pg100.txt +rendezvous. pg100.txt +rendezvoused, pg3200.txt +renew pg3200.txt +renewal pg31100.txt +renewal. pg31100.txt, pg3200.txt +renewed pg31100.txt, pg3200.txt +renewed, pg100.txt +renewed. pg31100.txt +renewed." pg3200.txt +renewed?" pg3200.txt +renewest, pg100.txt +renewing pg31100.txt +reno, pg3200.txt +renounce pg3200.txt +renown pg3200.txt, pg100.txt +renown! pg3200.txt +renown'd pg100.txt +renown'd, pg100.txt +renown, pg100.txt +renown--one pg3200.txt +renown. pg3200.txt +renown; pg31100.txt +renowned pg3200.txt, pg100.txt +rent pg31100.txt, pg3200.txt, pg100.txt +rent, pg3200.txt +rent-free, pg3200.txt +rented pg3200.txt +rents, pg100.txt +rents. pg100.txt +reorganize pg3200.txt +reorganized pg3200.txt +reorganized, pg3200.txt +reorganizing pg3200.txt +repaid pg31100.txt, pg3200.txt, pg100.txt +repaid; pg100.txt +repaid?" pg31100.txt +repair pg3200.txt, pg100.txt +repair, pg31100.txt, pg3200.txt, pg100.txt +repair. pg3200.txt, pg100.txt +repair." pg31100.txt +repair; pg3200.txt +repair? pg100.txt +repair?" pg3200.txt +repaired pg31100.txt +repaired. pg3200.txt +repairing pg3200.txt +repairing. pg3200.txt +repairs pg100.txt +repairs. pg3200.txt +repairs." pg3200.txt +reparenthesis, pg3200.txt +repast; pg100.txt +repay pg3200.txt +repay, pg100.txt +repaying pg100.txt +repayment pg3200.txt +repays pg100.txt +repeal'd pg100.txt +repeal'd-repeal'd pg100.txt +repeal. pg100.txt +repealed pg3200.txt +repeat pg31100.txt, pg3200.txt +repeat, pg3200.txt, pg100.txt +repeat. pg3200.txt +repeat." pg3200.txt +repeated pg31100.txt, pg3200.txt, pg100.txt +repeated)--i pg31100.txt +repeated, pg31100.txt, pg3200.txt +repeated--just pg3200.txt +repeated--or pg31100.txt +repeated. pg31100.txt, pg3200.txt +repeatedly pg31100.txt, pg3200.txt +repeatedly, pg31100.txt +repeatedly." pg31100.txt +repeatedly; pg31100.txt +repeating pg31100.txt, pg3200.txt +repeats pg3200.txt +repeats: pg3200.txt +repellant. pg3200.txt +repent pg3200.txt, pg100.txt +repent! pg100.txt +repent!" pg3200.txt +repent, pg3200.txt, pg100.txt +repent--though pg3200.txt +repent. pg31100.txt, pg100.txt +repent." pg3200.txt +repent; pg100.txt +repent? pg100.txt +repentance pg3200.txt, pg100.txt +repentance, pg100.txt +repentance. pg3200.txt, pg100.txt +repentances, pg3200.txt +repentancies. pg3200.txt +repentant. pg3200.txt +repented pg3200.txt, pg100.txt +repented. pg3200.txt +repenting pg3200.txt +repenting, pg3200.txt +repenting. pg31100.txt, pg3200.txt +repents pg3200.txt +repertoire pg3200.txt +repertoire. pg3200.txt +repetition pg31100.txt, pg3200.txt +repetition, pg31100.txt, pg3200.txt +repetition. pg3200.txt +repetitions. pg3200.txt, pg100.txt +repetitions; pg3200.txt +repile pg3200.txt +repin'd; pg100.txt +repine, pg3200.txt +repine. pg100.txt +repine." pg31100.txt +repining. pg3200.txt +replace pg3200.txt +replaced pg3200.txt +replaced; pg3200.txt +replacing pg31100.txt +replenish pg3200.txt +replete pg31100.txt +replete. pg100.txt +replied pg31100.txt, pg3200.txt, pg100.txt +replied, pg31100.txt, pg3200.txt, pg100.txt +replied-- pg31100.txt, pg3200.txt +replied. pg31100.txt, pg3200.txt, pg100.txt +replied: pg31100.txt, pg3200.txt +replied; pg31100.txt +replies pg31100.txt, pg100.txt +replies. pg3200.txt, pg100.txt +replies; pg3200.txt +repliest! pg100.txt +reply pg31100.txt, pg3200.txt, pg100.txt +reply, pg31100.txt, pg3200.txt +reply-- pg31100.txt, pg3200.txt +reply. 
pg31100.txt, pg3200.txt, pg100.txt +reply: pg31100.txt, pg3200.txt, pg100.txt +reply; pg31100.txt, pg100.txt +replying pg31100.txt, pg3200.txt +replying. pg3200.txt +replying." pg31100.txt +report pg31100.txt, pg3200.txt, pg100.txt +report! pg100.txt +report!" pg3200.txt +report'st pg100.txt +report, pg31100.txt, pg3200.txt, pg100.txt +report----" pg3200.txt +report--a pg3200.txt +report--commercial pg3200.txt +report--means pg3200.txt +report. pg31100.txt, pg3200.txt, pg100.txt +report." pg3200.txt +report: pg3200.txt, pg100.txt +report; pg31100.txt, pg100.txt +report? pg100.txt +report?" pg3200.txt +reported pg3200.txt, pg100.txt +reported, pg31100.txt, pg3200.txt +reported--invention pg3200.txt +reported. pg3200.txt, pg100.txt +reported: pg3200.txt +reported; pg100.txt +reported? pg100.txt +reporter pg3200.txt +reporter's pg3200.txt +reporter. pg3200.txt +reporters pg3200.txt +reporters' pg3200.txt +reporters, pg3200.txt +reporters. pg3200.txt +reporters: pg3200.txt +reporting pg3200.txt +reporting. pg100.txt +reports pg3200.txt, pg100.txt +reports, pg3200.txt, pg100.txt +reports--friend pg3200.txt +reports. pg3200.txt, pg100.txt +reports.] pg3200.txt +reports; pg100.txt +reposal pg100.txt +repose pg31100.txt, pg3200.txt, pg100.txt +repose! pg100.txt +repose, pg3200.txt, pg100.txt +repose. pg3200.txt, pg100.txt +repose: pg3200.txt +reposes pg31100.txt +reposing pg31100.txt, pg3200.txt +reprehend. pg100.txt +reprehensible, pg3200.txt +represent pg31100.txt, pg3200.txt +represent. pg3200.txt +represent." pg3200.txt +represent: pg100.txt +representation pg31100.txt, pg3200.txt +representation, pg31100.txt +representation." pg31100.txt +representations pg31100.txt +representative pg3200.txt +representative's pg3200.txt +representative, pg3200.txt +representative. pg3200.txt +representative; pg3200.txt +representatives pg3200.txt +representatives. pg3200.txt +representatives: pg3200.txt +represented pg31100.txt, pg3200.txt +represented. pg31100.txt, pg3200.txt +representing pg31100.txt, pg3200.txt +represents pg3200.txt +represents, pg3200.txt +repress pg31100.txt +repressed pg31100.txt +repressing pg31100.txt +repression; pg3200.txt +reprieve pg3200.txt, pg100.txt +reprieve, pg100.txt +reprieve. pg100.txt +reprieved pg3200.txt +reprimand, pg31100.txt +reprint pg3200.txt +reprinted pg3200.txt +reprisal; pg3200.txt +reproach pg31100.txt, pg3200.txt, pg100.txt +reproach, pg31100.txt +reproach- pg100.txt +reproach--" pg3200.txt +reproach. pg3200.txt, pg100.txt +reproach? pg100.txt +reproached pg31100.txt, pg3200.txt +reproaches pg100.txt +reproaches, pg31100.txt, pg3200.txt +reproaches. pg3200.txt +reproachfully. pg3200.txt +reproachfully: pg3200.txt +reproachfully? pg100.txt +reproaching pg31100.txt +reprobation. pg100.txt +reproduce pg3200.txt +reproduced pg3200.txt +reproduced, pg3200.txt +reproducing pg3200.txt +reproof pg100.txt +reproof, pg100.txt +reproof. pg31100.txt, pg100.txt +reproof." pg31100.txt +reproof: pg3200.txt +reproofs pg31100.txt +reprove pg100.txt +reprove. pg31100.txt, pg100.txt +reproved pg3200.txt +reproved. pg31100.txt +reproving, pg100.txt +reptile pg3200.txt +reptile!" pg3200.txt +reptile, pg3200.txt +reptile. pg3200.txt +reptile." pg3200.txt +reptile?" pg3200.txt +reptiles, pg3200.txt +reptiles. pg3200.txt +repton, pg31100.txt +repton." pg31100.txt +republic pg3200.txt +republic, pg3200.txt +republic," pg3200.txt +republic--even pg3200.txt +republic. pg3200.txt +republic." pg3200.txt +republic?" pg3200.txt +republican pg3200.txt +republicans pg3200.txt +republicans. 
pg3200.txt +republished pg3200.txt +repudiate pg3200.txt +repugnancy? pg100.txt +repulse pg3200.txt, pg100.txt +repulse, pg3200.txt +repulse; pg3200.txt +repulsed pg31100.txt +repulsed. pg3200.txt +repulsive pg31100.txt, pg3200.txt +repulsive, pg31100.txt, pg3200.txt +repulsive. pg3200.txt +repulsiveness pg3200.txt +repurifying pg3200.txt +reputa-- pg3200.txt +reputably pg3200.txt +reputation pg31100.txt, pg3200.txt, pg100.txt +reputation! pg100.txt +reputation's pg3200.txt +reputation, pg3200.txt, pg100.txt +reputation- pg100.txt +reputation--draws pg3200.txt +reputation. pg3200.txt, pg100.txt +reputation." pg3200.txt +reputation.'" pg3200.txt +reputation; pg100.txt +reputations pg3200.txt +reputations, pg3200.txt +repute pg100.txt +repute, pg3200.txt +repute--a pg3200.txt +repute. pg3200.txt +repute?" pg3200.txt +reputed pg100.txt +reputed. pg100.txt +request pg31100.txt, pg3200.txt, pg100.txt +request) pg3200.txt +request, pg3200.txt, pg100.txt +request- pg100.txt +request--but pg3200.txt +request--usually pg3200.txt +request. pg31100.txt, pg3200.txt, pg100.txt +request." pg3200.txt +request; pg100.txt +request? pg100.txt +requested pg31100.txt, pg3200.txt, pg100.txt +requested, pg100.txt +requested. pg100.txt +requesting pg31100.txt, pg3200.txt +requesting, pg31100.txt +requests pg100.txt +requests, pg3200.txt, pg100.txt +requests. pg100.txt +requir'd pg100.txt +requir'd; pg100.txt +require pg31100.txt, pg3200.txt, pg100.txt +require, pg3200.txt, pg100.txt +require. pg31100.txt, pg3200.txt, pg100.txt +require." pg31100.txt +require: pg3200.txt +require? pg3200.txt +required pg31100.txt, pg3200.txt, pg100.txt +required). pg3200.txt +required, pg31100.txt, pg3200.txt +required. pg31100.txt, pg3200.txt +required." pg31100.txt +required;--and pg31100.txt +required?" pg3200.txt +requirement pg3200.txt +requirement, pg3200.txt +requirements pg3200.txt +requirements. pg3200.txt +requirements." pg3200.txt +requires pg31100.txt, pg3200.txt, pg100.txt +requires, pg31100.txt +requires," pg31100.txt +requires. pg3200.txt, pg100.txt +requireth pg3200.txt +requiring pg31100.txt, pg3200.txt +requiring, pg100.txt +requisite pg3200.txt +requisite. pg3200.txt +requisites pg3200.txt +requisites; pg3200.txt +requisition." pg3200.txt +requisition?" pg3200.txt +requit pg100.txt +requital pg100.txt +requital. pg100.txt +requite pg100.txt +requite. pg100.txt +requite." pg3200.txt +requited pg31100.txt, pg3200.txt, pg100.txt +requited!" pg3200.txt +reread pg3200.txt +res'. pg3200.txt +res'." pg3200.txt +res, pg3200.txt +rescind pg3200.txt +rescu'd? pg100.txt +rescue pg3200.txt, pg100.txt +rescue! pg3200.txt, pg100.txt +rescue!' pg3200.txt +rescue, pg3200.txt, pg100.txt +rescue. pg3200.txt, pg100.txt +rescue: pg3200.txt +rescue? pg100.txt +rescued pg3200.txt, pg100.txt +rescues pg100.txt +rescuing pg3200.txt +research pg3200.txt +research. pg3200.txt +research; pg31100.txt +resemblance pg31100.txt, pg3200.txt +resemblance!" pg3200.txt +resemblance. pg3200.txt +resemblance." pg31100.txt +resemblances pg31100.txt, pg3200.txt +resemble pg31100.txt, pg3200.txt, pg100.txt +resemble, pg100.txt +resemble. pg100.txt +resembled pg31100.txt, pg3200.txt, pg100.txt +resembles pg3200.txt +resembles; pg100.txt +resembleth pg100.txt +resembling pg3200.txt +resembling, pg31100.txt +resent pg31100.txt +resent. pg31100.txt +resented pg31100.txt +resented. 
pg31100.txt +resentful pg3200.txt +resentfully pg31100.txt, pg3200.txt +resentfully: pg3200.txt +resenting pg3200.txt +resentment pg31100.txt, pg3200.txt +resentment, pg3200.txt +resentment-- pg31100.txt +resentment--conduct pg3200.txt +resentment. pg31100.txt +reserv'd pg100.txt +reserv'd, pg100.txt +reserv'd. pg100.txt +reservation pg3200.txt +reservation: pg3200.txt +reservations. pg3200.txt +reserve pg31100.txt, pg3200.txt, pg100.txt +reserve, pg31100.txt +reserve--i pg31100.txt +reserve-energies, pg3200.txt +reserve. pg31100.txt, pg3200.txt, pg100.txt +reserve." pg31100.txt +reserve: pg31100.txt +reserve; pg3200.txt +reserved pg31100.txt, pg3200.txt +reserved. pg31100.txt +reserved." pg31100.txt +reserves pg3200.txt +reserving pg3200.txt +reservoir pg3200.txt +reservoir, pg3200.txt +reservoir. pg3200.txt +resettled pg31100.txt +reshipment, pg3200.txt +reside pg3200.txt, pg100.txt +reside; pg31100.txt +resided pg3200.txt +residence pg31100.txt, pg3200.txt +residence, pg31100.txt, pg3200.txt, pg100.txt +residence. pg3200.txt, pg100.txt +residence." pg31100.txt +residence; pg3200.txt +residency. pg3200.txt +residency; pg3200.txt +resident pg31100.txt, pg3200.txt +resident, pg31100.txt, pg3200.txt +residents pg3200.txt +residents, pg3200.txt +residents. pg3200.txt +resides pg3200.txt, pg100.txt +resides- pg100.txt +residing pg31100.txt +residing, pg31100.txt +residue pg31100.txt +resign pg100.txt +resign! pg3200.txt +resign, pg100.txt +resign. pg3200.txt, pg100.txt +resign." pg3200.txt +resignation pg31100.txt, pg3200.txt +resignation, pg3200.txt +resignation." pg3200.txt +resignation: pg3200.txt +resignation; pg3200.txt +resigned pg31100.txt, pg3200.txt +resigned. pg31100.txt, pg3200.txt +resignedly-- pg3200.txt +resignedly: pg3200.txt +resigns pg3200.txt +resist pg31100.txt, pg3200.txt +resist, pg100.txt +resist. pg31100.txt, pg3200.txt, pg100.txt +resist?" pg3200.txt +resistance pg31100.txt, pg3200.txt +resistance. pg3200.txt, pg100.txt +resistance; pg31100.txt +resisted pg31100.txt, pg3200.txt +resisted. pg3200.txt, pg100.txt +resisting pg31100.txt, pg3200.txt +resisting, pg3200.txt +resistless pg3200.txt +resists pg100.txt +resk pg3200.txt +resks--he's pg3200.txt +resolute pg31100.txt, pg3200.txt +resolute. pg100.txt +resolute: pg100.txt +resolutely pg31100.txt, pg3200.txt +resolutely. pg3200.txt +resolutes, pg100.txt +resolution pg31100.txt, pg3200.txt, pg100.txt +resolution! pg31100.txt +resolution, pg31100.txt, pg3200.txt, pg100.txt +resolution--' pg3200.txt +resolution. pg31100.txt, pg3200.txt, pg100.txt +resolution." pg31100.txt +resolution: pg3200.txt +resolution; pg100.txt +resolutions pg31100.txt, pg3200.txt +resolutions; pg3200.txt +resolutions?" pg3200.txt +resolv'd pg100.txt +resolv'd! pg100.txt +resolv'd, pg100.txt +resolv'd. pg100.txt +resolve pg31100.txt, pg3200.txt, pg100.txt +resolve. pg100.txt +resolved pg31100.txt, pg3200.txt, pg100.txt +resolved, pg31100.txt, pg3200.txt, pg100.txt +resolved. pg100.txt +resolves pg31100.txt, pg100.txt +resolving pg31100.txt +resort pg31100.txt, pg3200.txt +resort, pg3200.txt, pg100.txt +resort- pg100.txt +resort. pg100.txt +resort: pg100.txt +resort; pg100.txt +resort? pg3200.txt +resorted pg3200.txt +resorting pg31100.txt +resorts pg3200.txt +resorts. pg3200.txt +resounded pg3200.txt +resounds pg100.txt +resource pg31100.txt +resources pg31100.txt, pg3200.txt +resources, pg3200.txt +respec' pg3200.txt +respect pg31100.txt, pg3200.txt, pg100.txt +respect! 
pg100.txt +respect, pg31100.txt, pg3200.txt, pg100.txt +respect," pg3200.txt +respect- pg100.txt +respect-- pg3200.txt +respect--but pg31100.txt +respect--no, pg3200.txt +respect-worthy, pg3200.txt +respect. pg31100.txt, pg3200.txt, pg100.txt +respect." pg31100.txt +respect.'" pg31100.txt +respect: pg3200.txt +respect; pg31100.txt, pg3200.txt, pg100.txt +respectability pg31100.txt, pg3200.txt +respectability! pg3200.txt +respectability, pg31100.txt +respectability. pg31100.txt, pg3200.txt +respectable pg31100.txt, pg3200.txt +respectable, pg31100.txt, pg3200.txt +respectable. pg3200.txt +respectable; pg31100.txt +respected pg31100.txt, pg3200.txt, pg100.txt +respected! pg31100.txt +respected, pg3200.txt +respected. pg31100.txt, pg3200.txt +respected; pg3200.txt +respecter pg3200.txt +respectful pg31100.txt, pg3200.txt +respectfully pg31100.txt, pg3200.txt +respectfully, pg3200.txt +respectfully. pg3200.txt +respectfulness pg3200.txt +respectfulness-- pg3200.txt +respectfulness. pg3200.txt +respecting pg31100.txt, pg3200.txt +respective pg31100.txt, pg3200.txt +respectively. pg3200.txt +respects pg31100.txt, pg3200.txt, pg100.txt +respects!" pg3200.txt +respects, pg3200.txt, pg100.txt +respects. pg3200.txt +respects; pg31100.txt +respectworthy pg3200.txt +respite pg3200.txt, pg100.txt +respite, pg3200.txt +respite. pg3200.txt +resplendent pg3200.txt +respond pg3200.txt +respond!" pg3200.txt +respond, pg3200.txt +respond--but pg3200.txt +respond. pg3200.txt +respond." pg3200.txt +responded pg3200.txt +responded-- pg3200.txt +responded: pg3200.txt +responded; pg3200.txt +response pg3200.txt +response, pg3200.txt +response--and pg3200.txt +response. pg3200.txt +responses. pg3200.txt +responsibilities pg3200.txt +responsibilities." pg3200.txt +responsibility pg3200.txt +responsibility!" pg31100.txt +responsibility. pg3200.txt +responsibility." pg3200.txt +responsibility.) pg3200.txt +responsible pg3200.txt +responsible. pg3200.txt +responsible." pg3200.txt +responsio!" pg3200.txt +responsive. pg3200.txt +rest pg31100.txt, pg3200.txt, pg100.txt +rest! pg3200.txt, pg100.txt +rest!" pg3200.txt +rest!' pg3200.txt, pg100.txt +rest" pg3200.txt +rest). pg3200.txt +rest, pg31100.txt, pg3200.txt, pg100.txt +rest- pg100.txt +rest--" pg3200.txt +rest--for pg3200.txt +rest. pg31100.txt, pg3200.txt, pg100.txt +rest." pg31100.txt, pg3200.txt +rest.' pg3200.txt +rest.--to pg31100.txt +rest: pg100.txt +rest; pg31100.txt, pg3200.txt, pg100.txt +rest? pg31100.txt, pg3200.txt, pg100.txt +rest?" pg31100.txt +restated. pg3200.txt +restaurant pg3200.txt +restaurant, pg3200.txt +restaurants--but pg3200.txt +restaurants--duchesses pg3200.txt +restaurants. pg3200.txt +restaurants; pg3200.txt +rested pg3200.txt, pg100.txt +rested, pg3200.txt +rested. pg3200.txt +rested: pg3200.txt +rested; pg3200.txt +resteth pg100.txt +restful pg3200.txt +resting pg3200.txt +resting, pg3200.txt +resting-place pg3200.txt +resting. pg3200.txt +resting; pg3200.txt +restitution. pg100.txt +restitution; pg3200.txt +restive pg3200.txt +restless pg31100.txt, pg3200.txt +restless, pg31100.txt, pg3200.txt +restless. pg3200.txt +restlessness pg31100.txt, pg3200.txt +restlessness, pg31100.txt +restlessness; pg3200.txt +restor'd pg100.txt +restor'd! pg100.txt +restor'd? pg100.txt +restoration pg31100.txt, pg3200.txt +restoration, pg3200.txt +restoration- pg3200.txt +restoratives pg31100.txt +restore pg31100.txt, pg3200.txt, pg100.txt +restore! pg100.txt +restore, pg100.txt +restore. 
pg100.txt +restored pg31100.txt, pg3200.txt, pg100.txt +restored, pg31100.txt, pg3200.txt +restored. pg3200.txt +restored." pg3200.txt +restoring pg31100.txt +restrain pg31100.txt, pg3200.txt, pg100.txt +restrain'd, pg100.txt +restrain'd. pg100.txt +restrained pg31100.txt, pg3200.txt +restrained, pg100.txt +restraining pg3200.txt, pg100.txt +restraint pg31100.txt, pg100.txt +restraint, pg31100.txt, pg100.txt +restraint. pg3200.txt, pg100.txt +restraint; pg31100.txt, pg100.txt +restraint? pg100.txt +restraints, pg31100.txt +restraints. pg31100.txt +restraints; pg31100.txt +restricted pg31100.txt, pg3200.txt +restricted. pg3200.txt +restrictions pg31100.txt, pg3200.txt +rests pg3200.txt, pg100.txt +rests: pg100.txt +rests? pg100.txt +result pg31100.txt, pg3200.txt +result! pg3200.txt +result!" pg3200.txt +result, pg31100.txt, pg3200.txt +result-- pg3200.txt +result--the pg3200.txt +result. pg31100.txt, pg3200.txt +result." pg3200.txt +result: pg3200.txt +result; pg3200.txt +result? pg3200.txt +result?" pg31100.txt, pg3200.txt +resulted pg3200.txt +resulted." pg3200.txt +resulting pg31100.txt, pg3200.txt, pg100.txt +resulting, pg31100.txt +resulting. pg3200.txt +resultless pg3200.txt +resultless. pg3200.txt +results pg31100.txt, pg3200.txt +results, pg3200.txt +results. pg3200.txt +results: pg3200.txt +results; pg3200.txt +resume pg3200.txt, pg100.txt +resume, pg3200.txt +resumed pg31100.txt, pg3200.txt +resumed, pg31100.txt +resumed-- pg3200.txt +resumed. pg3200.txt +resumed: pg3200.txt +resumed:] pg3200.txt +resuming pg3200.txt +resuming, pg3200.txt +resuming: pg3200.txt +resuming] pg3200.txt +resurrected pg3200.txt +resurrection pg3200.txt +resurrection, pg3200.txt +resurrection. pg3200.txt +resurrection; pg3200.txt +resuscitated. pg3200.txt +retail. pg3200.txt +retailed pg31100.txt, pg3200.txt +retailer pg3200.txt +retain pg31100.txt, pg3200.txt, pg100.txt +retain. pg3200.txt +retain? pg100.txt +retained pg3200.txt +retained. pg31100.txt +retainers. pg3200.txt +retaining pg31100.txt +retaliate, pg31100.txt +retaliation pg3200.txt +retaliation. pg3200.txt +retarding pg31100.txt +retards pg3200.txt +retchings pg3200.txt +retchings." pg3200.txt +retell pg3200.txt +retention. pg100.txt +retentive--and pg31100.txt +reticence: pg3200.txt +reties pg3200.txt +retinue pg3200.txt, pg100.txt +retinue, pg100.txt +retir'd pg100.txt +retir'd, pg100.txt +retire pg31100.txt, pg3200.txt, pg100.txt +retire, pg3200.txt, pg100.txt +retire. pg3200.txt, pg100.txt +retire." pg3200.txt +retire; pg100.txt +retire] pg100.txt +retired pg31100.txt, pg3200.txt +retired, pg3200.txt, pg100.txt +retired. pg3200.txt +retired." pg31100.txt +retirement pg31100.txt +retirement, pg31100.txt +retirement. pg31100.txt +retirement.--and pg31100.txt +retires, pg100.txt +retires. pg100.txt +retires] pg100.txt +retiring pg31100.txt, pg3200.txt +retold pg3200.txt +retold, pg100.txt +retort pg31100.txt, pg3200.txt, pg100.txt +retorted pg3200.txt +retorted. pg3200.txt +retorted: pg3200.txt +retrace, pg31100.txt +retrace--proven pg3200.txt +retract pg31100.txt, pg3200.txt +retract, pg31100.txt +retraction pg3200.txt +retraction, pg3200.txt +retranslation pg3200.txt +retranslation.] pg3200.txt +retreat pg31100.txt, pg3200.txt, pg100.txt +retreat!" pg3200.txt +retreat!' pg3200.txt +retreat, pg31100.txt, pg3200.txt, pg100.txt +retreat--up pg3200.txt +retreat. pg3200.txt +retreat; pg3200.txt, pg100.txt +retreat?" pg3200.txt +retreat] pg100.txt +retreating pg3200.txt +retreating, pg3200.txt +retreating. 
pg3200.txt +retreats: pg3200.txt +retrench." pg3200.txt +retrench?" pg31100.txt +retrenched pg31100.txt +retribution. pg3200.txt +retrogression." pg3200.txt +retrogression.' pg3200.txt +retrospect pg3200.txt +retrospect. pg31100.txt +retrospection. pg3200.txt +return pg31100.txt, pg3200.txt, pg100.txt +return! pg31100.txt +return' pg100.txt +return'd pg100.txt +return'd! pg100.txt +return'd, pg100.txt +return'd. pg100.txt +return'd; pg100.txt +return'd? pg100.txt +return, pg31100.txt, pg3200.txt, pg100.txt +return--" pg3200.txt +return--probably pg31100.txt +return--sellers pg3200.txt +return. pg31100.txt, pg3200.txt, pg100.txt +return." pg31100.txt, pg3200.txt +return; pg3200.txt, pg100.txt +return? pg31100.txt, pg100.txt +return?" pg31100.txt +returned pg31100.txt, pg3200.txt, pg100.txt +returned, pg31100.txt, pg3200.txt +returned. pg31100.txt, pg3200.txt +returned: pg3200.txt +returned; pg31100.txt, pg3200.txt +returneth pg100.txt +returneth." pg3200.txt +returning pg31100.txt, pg3200.txt, pg100.txt +returning, pg31100.txt +returning. pg31100.txt, pg3200.txt, pg100.txt +returns pg31100.txt, pg3200.txt, pg100.txt +returns. pg3200.txt, pg100.txt +returns." pg31100.txt, pg3200.txt +returns: pg100.txt +returns; pg3200.txt +retying pg3200.txt +retz, pg3200.txt +reuben pg3200.txt +reubens pg3200.txt +reunion pg3200.txt +reuter's pg3200.txt +rev. pg31100.txt, pg3200.txt +rev., pg3200.txt +reveal pg3200.txt, pg100.txt +reveal'd pg100.txt +reveal. pg100.txt +revealed pg31100.txt, pg3200.txt +revealed! pg3200.txt +revealed, pg31100.txt, pg3200.txt +revealed. pg31100.txt, pg3200.txt +revealment pg3200.txt +revealments. pg3200.txt +reveals pg3200.txt +reveals. pg3200.txt +reveille. pg3200.txt +reveille; pg3200.txt +reveille] pg3200.txt +revel pg3200.txt, pg100.txt +revel, pg100.txt +revel. pg3200.txt +revelation pg3200.txt +revelation. pg3200.txt +revelation." pg3200.txt +revelation?" pg3200.txt +revelations pg3200.txt +reveled pg3200.txt +reveler! pg100.txt +reveling pg3200.txt +reveller. pg100.txt +revelling. pg100.txt +revelry. pg100.txt +revels pg3200.txt +revels? pg100.txt +reveng'd pg100.txt +reveng'd, pg100.txt +reveng'd. pg100.txt +reveng'd; pg100.txt +reveng'd? pg100.txt +revenge pg31100.txt, pg3200.txt, pg100.txt +revenge! pg100.txt +revenge, pg31100.txt, pg3200.txt, pg100.txt +revenge- pg100.txt +revenge. pg3200.txt, pg100.txt +revenge." pg31100.txt +revenge; pg3200.txt, pg100.txt +revenge? pg100.txt +revenged pg100.txt +revenged, pg100.txt +revenged. pg100.txt +revengeful pg3200.txt +revengeful. pg3200.txt +revenges pg100.txt +revenges, pg100.txt +revenges. pg100.txt +revenue pg3200.txt, pg100.txt +revenue. pg3200.txt, pg100.txt +revenues. pg3200.txt, pg100.txt +reverb'rate pg100.txt +revere pg3200.txt +revere, pg3200.txt +revered, pg3200.txt +reverence pg3200.txt, pg100.txt +reverence!" pg3200.txt +reverence) pg100.txt +reverence, pg3200.txt, pg100.txt +reverence- pg100.txt +reverence-compelling pg3200.txt +reverence. pg3200.txt, pg100.txt +reverence." pg3200.txt +reverence? pg100.txt +reverence?" pg3200.txt +reverenced." pg3200.txt +reverend pg3200.txt, pg100.txt +reverend, pg100.txt +reverend. pg3200.txt +reverent pg3200.txt +reverently pg3200.txt +reverently, pg3200.txt, pg100.txt +reverently. pg100.txt +reverently: pg3200.txt +reverie pg31100.txt +reverie, pg3200.txt +reverie. pg3200.txt +reverie." pg31100.txt +reveries pg3200.txt +revers'd. pg100.txt +reversal pg3200.txt +reverse pg31100.txt, pg3200.txt +reverse!" pg31100.txt +reverse--for, pg3200.txt +reverse. 
pg31100.txt, pg3200.txt +reverse." pg3200.txt +reverse.--but pg31100.txt +reverse; pg31100.txt +reversed, pg3200.txt +reversed. pg31100.txt, pg3200.txt +reverses pg3200.txt +reversion. pg100.txt +revert pg31100.txt +reverted pg31100.txt, pg3200.txt +reverted, pg100.txt +review pg31100.txt, pg3200.txt +review, pg3200.txt, pg100.txt +review. pg3200.txt +review.") pg3200.txt +review; pg3200.txt +reviewer pg31100.txt +reviewing pg3200.txt +reviews, pg3200.txt +revil'd pg100.txt +reviled pg3200.txt +reviled, pg3200.txt +reviling pg3200.txt +revise pg3200.txt +revised pg31100.txt, pg3200.txt +revised, pg3200.txt +revises, pg3200.txt +revision pg3200.txt +revisit pg3200.txt +revisited. pg3200.txt +reviv'd pg100.txt +reviv'd, pg100.txt +revival pg3200.txt +revival. pg3200.txt +revive. pg100.txt +revived pg31100.txt, pg3200.txt +revived. pg31100.txt, pg3200.txt +revives. pg100.txt +reviving pg31100.txt +revok'd. pg100.txt +revoke pg100.txt +revokement pg100.txt +revolt pg3200.txt, pg100.txt +revolt, pg100.txt +revolt. pg3200.txt, pg100.txt +revolt; pg100.txt +revolt? pg100.txt +revolted pg31100.txt, pg3200.txt, pg100.txt +revolted. pg100.txt +revolting, pg3200.txt +revolting. pg3200.txt +revolts pg100.txt +revolts, pg100.txt +revolution pg31100.txt, pg3200.txt +revolution, pg3200.txt, pg100.txt +revolution. pg3200.txt +revolutionary pg3200.txt +revolutionists, pg3200.txt +revolutionized pg3200.txt +revolutions pg3200.txt +revolve pg100.txt +revolver pg3200.txt +revolver, pg3200.txt +revolver-shots; pg3200.txt +revolver. pg3200.txt +revolver: pg3200.txt +revolver; pg3200.txt +revolvers pg3200.txt +revolvers, pg3200.txt +revolving pg3200.txt +reward pg31100.txt, pg3200.txt, pg100.txt +reward' pg3200.txt +reward, pg3200.txt, pg100.txt +reward--dat's pg3200.txt +reward--that pg3200.txt +reward. pg31100.txt, pg3200.txt, pg100.txt +reward." pg3200.txt +reward; pg3200.txt +reward?" pg3200.txt +rewarded pg31100.txt, pg3200.txt +rewarded!" pg3200.txt +rewarded, pg3200.txt +rewarded. pg100.txt +rewards pg3200.txt, pg100.txt +rewards, pg100.txt +rewards--at pg3200.txt +rewards. pg31100.txt +rewards." pg3200.txt +reworded pg100.txt +rewrite pg3200.txt +reynaldo, pg100.txt +reynaldo. pg100.txt +reynaldo? pg100.txt +reynolds pg31100.txt +rhapsodies pg3200.txt +rhapsody pg3200.txt +rhapsody, pg3200.txt +rheims pg3200.txt +rheims!" pg3200.txt +rheims, pg3200.txt +rheims. pg3200.txt +rheims; pg100.txt +rheims?" pg3200.txt +rhetoric pg100.txt +rhetoric! pg100.txt +rhetoric. pg3200.txt +rheum! pg100.txt +rheum, pg100.txt +rheum. pg100.txt +rheum; pg100.txt +rheumatic pg31100.txt +rheumatism pg3200.txt +rheumatism, pg3200.txt +rheumatism. pg3200.txt +rheumatism? pg31100.txt +rhime!" pg31100.txt +rhine pg3200.txt +rhine, pg3200.txt +rhine. pg3200.txt +rhine; pg3200.txt +rhinoceros pg3200.txt +rhinoceros. pg3200.txt +rhode pg3200.txt +rhodes pg3200.txt +rhodes' pg3200.txt +rhodes, pg3200.txt, pg100.txt +rhodes. pg3200.txt, pg100.txt +rhodes." pg3200.txt +rhone pg3200.txt +rhone, pg3200.txt +rhone. pg3200.txt +rhym'd. pg100.txt +rhyme pg3200.txt, pg100.txt +rhyme! pg100.txt +rhyme, pg100.txt +rhyme-jingle? pg3200.txt +rhyme. pg100.txt +rhyme: pg3200.txt +rhyme; pg100.txt +rhyme? pg100.txt +rhymer. pg3200.txt +rhymers pg100.txt +rhymes pg3200.txt, pg100.txt +rhymes, pg3200.txt, pg100.txt +rhythm-- pg3200.txt +ri-i-ley, pg3200.txt +rialto. pg3200.txt +rialto? pg100.txt +rib pg3200.txt +rib-breaking? 
pg100.txt +ribald pg3200.txt +ribald, pg3200.txt +ribbon pg3200.txt +ribbon, pg31100.txt +ribbon; pg3200.txt +ribbons pg31100.txt +ribs pg3200.txt, pg100.txt +ribs, pg100.txt +ribs--" pg3200.txt +ribs. pg3200.txt, pg100.txt +ribs." pg3200.txt +rice pg31100.txt, pg3200.txt +rice, pg3200.txt +rice. pg3200.txt +rich pg31100.txt, pg3200.txt, pg100.txt +rich! pg3200.txt, pg100.txt +rich!" pg3200.txt +rich" pg3200.txt +rich'd, pg100.txt +rich, pg31100.txt, pg3200.txt, pg100.txt +rich--and pg3200.txt +rich. pg3200.txt, pg100.txt +rich." pg31100.txt, pg3200.txt +rich; pg3200.txt, pg100.txt +rich? pg100.txt +rich?" pg31100.txt, pg3200.txt +richard pg3200.txt, pg100.txt +richard! pg31100.txt, pg100.txt +richard!' pg100.txt +richard's pg31100.txt, pg3200.txt +richard, pg100.txt +richard. pg3200.txt, pg100.txt +richard." pg31100.txt, pg3200.txt +richard.' pg100.txt +richard.) pg3200.txt +richard: pg100.txt +richard; pg100.txt +richards pg3200.txt +richards, pg3200.txt +richards." pg3200.txt +richards; pg3200.txt +richardson pg31100.txt, pg3200.txt +richardson, pg3200.txt +richardsons." pg31100.txt +richelieu! pg3200.txt +richelieu. pg3200.txt +richemont pg3200.txt +richemont, pg3200.txt +richer- pg100.txt +richer. pg3200.txt, pg100.txt +riches pg3200.txt, pg100.txt +riches, pg3200.txt +riches. pg3200.txt +riches; pg100.txt +richest pg3200.txt +richest, pg3200.txt +richly pg3200.txt +richly, pg3200.txt +richmond pg31100.txt, pg100.txt +richmond! pg100.txt +richmond, pg100.txt +richmond. pg31100.txt, pg100.txt +richmond." pg31100.txt +richmond; pg100.txt +richmond? pg100.txt +richness pg3200.txt +richness, pg3200.txt +richness. pg3200.txt +rickety pg3200.txt +rickety, pg3200.txt +rid pg31100.txt, pg3200.txt, pg100.txt +riddance. pg100.txt +ridden pg3200.txt +riddle pg31100.txt, pg3200.txt +riddle! pg100.txt +riddle-book pg31100.txt +riddle?" pg3200.txt +riddled pg3200.txt +riddles pg3200.txt +ride pg31100.txt, pg3200.txt, pg100.txt +ride'! pg100.txt +ride's pg100.txt +ride, pg3200.txt, pg100.txt +ride--a pg3200.txt +ride. pg31100.txt, pg3200.txt +ride." pg31100.txt +ride; pg3200.txt +ride? pg3200.txt, pg100.txt +rider pg3200.txt +rider's pg3200.txt +riders, pg3200.txt +riders. pg3200.txt +rides pg31100.txt +rides." pg3200.txt +ridge pg31100.txt, pg3200.txt, pg100.txt +ridge" pg3200.txt +ridge--a pg3200.txt +ridge. pg3200.txt +ridges pg3200.txt +ridges--surmounted pg3200.txt +ridicule pg31100.txt, pg3200.txt +ridicule. pg31100.txt, pg3200.txt +ridicule." pg31100.txt +ridiculed, pg3200.txt +ridiculed; pg31100.txt +ridiculous pg31100.txt, pg3200.txt, pg100.txt +ridiculous! pg31100.txt +ridiculous!" pg31100.txt, pg3200.txt +ridiculous, pg31100.txt, pg100.txt +ridiculous. pg31100.txt, pg3200.txt +ridiculous." pg3200.txt +ridiculous; pg3200.txt +ridiculously pg3200.txt +ridiculously?" pg31100.txt +ridiculousness, pg3200.txt +riding pg31100.txt, pg3200.txt, pg100.txt +riding! pg3200.txt +riding!" pg31100.txt +riding, pg31100.txt +riding-horses, pg3200.txt +riding-robes? pg100.txt +riding-rods, pg100.txt +riding-schools. pg3200.txt +riding." pg31100.txt +ridley' pg3200.txt +riff-raff pg3200.txt +riffel pg3200.txt +riffelberg pg3200.txt +riffelberg. pg3200.txt +riffelberg." pg3200.txt +riffians pg3200.txt +rifle pg3200.txt +rifle, pg3200.txt +rifle. pg3200.txt +rifle: pg3200.txt +rifled pg3200.txt +rift pg3200.txt, pg100.txt +rift. pg100.txt +rig pg3200.txt +rig, pg3200.txt +rigg'd, pg100.txt +rigg'd. pg100.txt +rigged pg3200.txt +rigging pg3200.txt +riggish. 
pg100.txt +right pg31100.txt, pg3200.txt, pg100.txt +right! pg31100.txt, pg3200.txt, pg100.txt +right!" pg3200.txt +right!' pg3200.txt +right" pg3200.txt +right, pg31100.txt, pg3200.txt, pg100.txt +right," pg3200.txt +right- pg100.txt +right-- pg3200.txt +right--" pg3200.txt +right--blasting-time pg3200.txt +right--but pg3200.txt +right--he pg3200.txt +right--he's pg3200.txt +right--it pg3200.txt +right--man--and pg3200.txt +right--perfectly pg3200.txt +right--show pg3200.txt +right--space pg3200.txt +right--that's pg3200.txt +right--they pg31100.txt +right--three pg3200.txt +right--when pg3200.txt +right-bank pg3200.txt +right-down pg3200.txt +right-fielder pg3200.txt +right-hearted pg3200.txt +right-hearted, pg3200.txt +right-principled pg3200.txt +right. pg31100.txt, pg3200.txt, pg100.txt +right." pg31100.txt, pg3200.txt +right.' pg3200.txt +right.) pg3200.txt +right.-- pg31100.txt +right.--in pg31100.txt +right.] pg3200.txt +right: pg31100.txt +right; pg31100.txt, pg3200.txt, pg100.txt +right? pg31100.txt, pg3200.txt, pg100.txt +right?" pg31100.txt, pg3200.txt +right?'" pg3200.txt +right?--that's pg3200.txt +righteous pg3200.txt +righteous; pg100.txt +righteousness pg3200.txt +righteousness, pg3200.txt +righteousness? pg3200.txt +rightful pg3200.txt, pg100.txt +rightfully pg3200.txt +rightly pg31100.txt, pg3200.txt, pg100.txt +rightly, pg3200.txt, pg100.txt +rightly--carved pg3200.txt +rightly. pg100.txt +rights pg31100.txt, pg3200.txt, pg100.txt +rights, pg31100.txt, pg3200.txt, pg100.txt +rights--he pg3200.txt +rights. pg31100.txt, pg3200.txt, pg100.txt +rights." pg3200.txt +rights; pg100.txt +rights? pg31100.txt +rigi-kulm pg3200.txt +rigi?" pg3200.txt +rigid pg3200.txt +rigid. pg3200.txt +rigidly pg3200.txt +rigidly, pg3200.txt +rigors pg3200.txt +rigour, pg100.txt +rigours pg31100.txt +riled. pg3200.txt +riley pg3200.txt +riley, pg3200.txt +rill. pg3200.txt +rim pg3200.txt +rinaldo, pg100.txt +rind pg3200.txt +rind, pg3200.txt, pg100.txt +ring pg31100.txt, pg3200.txt, pg100.txt +ring! pg3200.txt +ring, pg31100.txt, pg3200.txt, pg100.txt +ring- pg100.txt +ring--that pg3200.txt +ring-carrier! pg100.txt +ring-finger--" pg3200.txt +ring-streaked-and- pg3200.txt +ring. pg3200.txt, pg100.txt +ring." pg3200.txt +ring: pg100.txt +ring; pg100.txt +ring? pg100.txt +ring?" pg3200.txt +ring] pg100.txt +ringing pg31100.txt, pg3200.txt +ringing, pg3200.txt +ringleader, pg3200.txt +ringleaders pg3200.txt +ringmaster pg3200.txt +rings pg3200.txt +rings, pg3200.txt, pg100.txt +rings. pg3200.txt, pg100.txt +rings; pg100.txt +rings? pg100.txt +rio pg3200.txt +riot pg3200.txt +riot, pg3200.txt +riot--was pg3200.txt +riot. pg100.txt +riot." pg31100.txt +riot; pg100.txt +riot?" pg31100.txt, pg3200.txt +rioted pg3200.txt +rioter. pg3200.txt +rioters, pg3200.txt +rioting pg3200.txt +riotous pg3200.txt +riots pg3200.txt +riots, pg3200.txt, pg100.txt +riots. pg3200.txt, pg100.txt +rip pg3200.txt, pg100.txt +rip!" pg3200.txt +ripe pg3200.txt, pg100.txt +ripe, pg3200.txt, pg100.txt +ripe. pg100.txt +ripe: pg100.txt +ripe; pg3200.txt, pg100.txt +ripely pg100.txt +ripening pg31100.txt, pg3200.txt +ripening? pg3200.txt +ripp'd, pg100.txt +ripp'd. pg100.txt +ripped pg3200.txt +ripped. pg3200.txt +ripper pg3200.txt +ripper!' pg3200.txt +ripping pg3200.txt +ripple pg3200.txt +rippled pg3200.txt +ripples pg3200.txt +rippling pg3200.txt +rise pg31100.txt, pg3200.txt, pg100.txt +rise, pg3200.txt, pg100.txt +rise--slowly--still pg3200.txt +rise. pg31100.txt, pg3200.txt, pg100.txt +rise." 
pg3200.txt +rise; pg100.txt +rise?" pg3200.txt +risen pg31100.txt, pg3200.txt +risen, pg3200.txt +rises pg3200.txt, pg100.txt +rises. pg100.txt +rises] pg100.txt +riseth pg100.txt +riseth] pg100.txt +rising pg31100.txt, pg3200.txt, pg100.txt +rising, pg31100.txt, pg3200.txt, pg100.txt +rising. pg3200.txt, pg100.txt +rising." pg3200.txt +risk pg31100.txt, pg3200.txt +risk!" pg31100.txt +risk, pg31100.txt, pg3200.txt +risk--it pg3200.txt +risk. pg31100.txt, pg3200.txt +risk." pg3200.txt +risks pg3200.txt +risks, pg3200.txt +risks. pg31100.txt, pg3200.txt +risks.--liable pg31100.txt +risks? pg3200.txt +risky pg3200.txt +risky--they pg3200.txt +risky. pg3200.txt +ritchie pg3200.txt +rite, pg100.txt +rite. pg100.txt +rite; pg100.txt +rite? pg100.txt +rites pg3200.txt, pg100.txt +rites, pg100.txt +rites. pg3200.txt, pg100.txt +rites: pg100.txt +ritter pg3200.txt +rival pg3200.txt +rival's, pg31100.txt +rival. pg3200.txt +rival; pg31100.txt +rivaled pg3200.txt +rivals. pg31100.txt, pg100.txt +riven pg3200.txt +river pg3200.txt, pg100.txt +river!" pg3200.txt +river!' pg3200.txt +river' pg3200.txt +river's pg3200.txt +river, pg3200.txt +river- pg3200.txt +river--at pg3200.txt +river--grounds pg3200.txt +river--he pg3200.txt +river-bank pg3200.txt +river-bank. pg3200.txt +river-frontage pg3200.txt +river. pg3200.txt, pg100.txt +river." pg3200.txt +river.' pg3200.txt +river.'" pg3200.txt +river; pg3200.txt +river?" pg3200.txt +river?' pg3200.txt +riverdale pg3200.txt +riverdale, pg3200.txt +riverdale-on-the-hudson, pg3200.txt +riverdale-on-the-hudson. pg3200.txt +riverdale. pg3200.txt +rivers pg3200.txt, pg100.txt +rivers" pg3200.txt +rivers, pg3200.txt, pg100.txt +rivers--arrival pg3200.txt +rivers. pg3200.txt +rivers; pg3200.txt, pg100.txt +rivet pg31100.txt, pg3200.txt +riveted, pg100.txt +rivets pg100.txt +riviera. pg3200.txt +rivoli pg3200.txt +rivoltella. pg3200.txt +rivulet pg3200.txt +rivulets. pg3200.txt +riz pg3200.txt +road pg31100.txt, pg3200.txt, pg100.txt +road!" pg3200.txt +road, pg31100.txt, pg3200.txt +road--a pg3200.txt +road-decoration. pg3200.txt +road-side pg3200.txt +road. pg31100.txt, pg3200.txt, pg100.txt +road." pg31100.txt, pg3200.txt +road.' pg3200.txt +road; pg31100.txt, pg3200.txt, pg100.txt +road? pg100.txt +road?" pg3200.txt +roads pg3200.txt +roads! pg3200.txt +roads, pg31100.txt, pg3200.txt +roads. pg3200.txt +roads; pg100.txt +roadside, pg3200.txt +roadside. pg3200.txt +roadsides pg3200.txt +roadstead pg3200.txt +roadway pg3200.txt +roadways pg3200.txt +roamed pg3200.txt +roaming? pg100.txt +roar pg3200.txt, pg100.txt +roar! pg3200.txt, pg100.txt +roar'd pg100.txt +roar'd. pg100.txt +roar, pg3200.txt, pg100.txt +roar. pg100.txt +roar; pg100.txt +roar? pg100.txt +roared pg31100.txt, pg3200.txt +roared, pg3200.txt +roaring pg3200.txt, pg100.txt +roaring. pg100.txt +roaring: pg3200.txt +roars pg100.txt +roars, pg100.txt +roars. pg3200.txt +roast pg31100.txt +roast, pg100.txt +roasted pg3200.txt +roastin' pg3200.txt +roasting pg3200.txt +roasting, pg31100.txt +rob pg3200.txt, pg100.txt +rob, pg3200.txt +rob; pg3200.txt +rob?" pg3200.txt +robb'ry? pg100.txt +robbed pg31100.txt, pg3200.txt +robbed!" pg3200.txt +robbed--and pg3200.txt +robbed. pg3200.txt +robber pg3200.txt +robber, pg100.txt +robber." pg3200.txt +robberies, pg3200.txt +robbers pg3200.txt +robbers, pg100.txt +robbers. pg3200.txt +robbers; pg3200.txt +robbers?" pg3200.txt +robbery pg3200.txt +robbery, pg3200.txt +robbery; pg3200.txt +robbery? pg3200.txt +robbery?" 
pg3200.txt +robbing pg3200.txt, pg100.txt +robbing, pg3200.txt +robe pg31100.txt, pg3200.txt, pg100.txt +robe! pg3200.txt, pg100.txt +robe, pg100.txt +robe. pg3200.txt, pg100.txt +robe? pg100.txt +robed pg3200.txt +robert pg31100.txt, pg3200.txt +robert! pg100.txt +robert's pg31100.txt +robert, pg31100.txt, pg100.txt +robert." pg31100.txt +robert? pg100.txt +robertson, pg31100.txt +robertus pg31100.txt +robes pg3200.txt, pg100.txt +robes! pg100.txt +robes, pg3200.txt, pg100.txt +robes; pg100.txt +robes? pg100.txt +robin pg100.txt +robin, pg100.txt +robin. pg100.txt +robinson pg3200.txt +robinson"--would pg3200.txt +robinson's pg31100.txt, pg3200.txt +robinson, pg3200.txt +robinson. pg3200.txt +robinson." pg31100.txt +robinson; pg31100.txt +robinson? pg3200.txt +robs pg3200.txt +robust pg3200.txt +robust. pg3200.txt +rochester pg3200.txt +rochester." pg3200.txt +rock pg31100.txt, pg3200.txt, pg100.txt +rock!" pg3200.txt +rock, pg3200.txt, pg100.txt +rock. pg3200.txt, pg100.txt +rock." pg3200.txt +rock.' pg3200.txt +rock; pg3200.txt +rock? pg100.txt +rockefeller pg3200.txt +rockefeller." pg3200.txt +rocket-sprays pg3200.txt +rockies pg3200.txt +rockies?" pg3200.txt +rocking pg3200.txt +rocking-chair, pg3200.txt +rocking-chairs pg3200.txt +rocking. pg3200.txt +rocks pg31100.txt, pg3200.txt, pg100.txt +rocks, pg3200.txt, pg100.txt +rocks--but pg3200.txt +rocks--which pg3200.txt +rocks. pg3200.txt, pg100.txt +rocks." pg31100.txt +rocks; pg3200.txt +rocks? pg100.txt +rocky pg3200.txt +rococo, pg3200.txt +rod pg3200.txt, pg100.txt +rod! pg100.txt +rod, pg3200.txt, pg100.txt +rod. pg3200.txt, pg100.txt +rode pg31100.txt, pg3200.txt, pg100.txt +rode. pg3200.txt +rode? pg100.txt +roderigo! pg100.txt +roderigo, pg100.txt +roderigo. pg100.txt +roderigo: pg100.txt +roderigo? pg100.txt +rodgers. pg3200.txt +rods pg3200.txt, pg100.txt +rods, pg100.txt +roe, pg100.txt +roe. pg100.txt +rogero? pg100.txt +rogers pg3200.txt +rogers's pg3200.txt +rogers, pg3200.txt +rogers. pg3200.txt +rogers.] pg3200.txt +rogers?" pg3200.txt +rogue pg100.txt +rogue! pg100.txt +rogue, pg100.txt +rogue- pg100.txt +rogue. pg100.txt +rogue; pg100.txt +roguery! pg100.txt +rogues pg100.txt +rogues, pg100.txt +roi pg3200.txt +roi); pg3200.txt +roi. pg3200.txt +roi.' pg3200.txt +roland! pg3200.txt +role pg3200.txt +roles, pg3200.txt +roll pg31100.txt, pg3200.txt, pg100.txt +roll? pg100.txt +rolled pg3200.txt +rolled, pg3200.txt +rollicking pg3200.txt +rolling pg31100.txt, pg3200.txt +rolling, pg100.txt +rolling-pin pg3200.txt +rolling: pg100.txt +rolls, pg3200.txt, pg100.txt +roma pg3200.txt +roman pg3200.txt, pg100.txt +roman! pg3200.txt +roman!--going pg3200.txt +roman, pg3200.txt, pg100.txt +roman. pg3200.txt, pg100.txt +roman; pg100.txt +roman? pg100.txt +romance pg3200.txt +romance! pg3200.txt +romance!" pg3200.txt +romance, pg31100.txt, pg3200.txt +romance. pg3200.txt +romances pg3200.txt +romances, pg3200.txt +romances. pg3200.txt +romancing pg3200.txt +romancist pg3200.txt +romans pg100.txt +romans! pg100.txt +romans, pg3200.txt, pg100.txt +romans- pg100.txt +romans. pg100.txt +romans? pg100.txt +romantic pg31100.txt, pg3200.txt +romantic, pg31100.txt +romantic. pg3200.txt +romantic." pg31100.txt +romanticism, pg3200.txt +romanticist. pg3200.txt +romantics pg3200.txt +romaunt. pg3200.txt +rome pg3200.txt, pg100.txt +rome! pg3200.txt, pg100.txt +rome, pg3200.txt, pg100.txt +rome- pg100.txt +rome--and pg3200.txt +rome--nor pg3200.txt +rome. pg3200.txt, pg100.txt +rome." pg3200.txt +rome.] 
pg3200.txt +rome: pg3200.txt, pg100.txt +rome; pg100.txt +rome? pg100.txt +romeo pg100.txt +romeo! pg100.txt +romeo, pg100.txt +romeo. pg100.txt +romeo? pg100.txt +romped pg3200.txt +romped, pg3200.txt +romping pg3200.txt +romps pg3200.txt +romulus. pg3200.txt +ronalds pg3200.txt +ronyon! pg100.txt +rood, pg100.txt +roof pg31100.txt, pg3200.txt, pg100.txt +roof'd, pg100.txt +roof, pg31100.txt, pg3200.txt, pg100.txt +roof--and pg3200.txt +roof. pg31100.txt, pg3200.txt +roof." pg31100.txt +roof; pg3200.txt +roofed pg3200.txt +roofs pg31100.txt, pg3200.txt, pg100.txt +rook. pg100.txt +room pg31100.txt, pg3200.txt, pg100.txt +room! pg3200.txt +room!" pg31100.txt +room, pg31100.txt, pg3200.txt, pg100.txt +room," pg31100.txt +room,--"i pg31100.txt +room-- pg3200.txt +room--"is pg31100.txt +room--but pg31100.txt +room-mate pg3200.txt +room-mate, pg3200.txt +room-mate. pg3200.txt +room. pg31100.txt, pg3200.txt, pg100.txt +room." pg31100.txt, pg3200.txt +room.) pg3200.txt +room.- pg100.txt +room: pg3200.txt +room; pg31100.txt, pg3200.txt +room? pg3200.txt +room?" pg31100.txt, pg3200.txt +roomed pg3200.txt +roominess pg3200.txt +roommate, pg3200.txt +rooms pg31100.txt, pg3200.txt, pg100.txt +rooms, pg31100.txt, pg3200.txt +rooms. pg31100.txt, pg3200.txt +rooms." pg31100.txt +rooms?" pg31100.txt, pg3200.txt +roomy pg3200.txt +roomy, pg3200.txt +roomy. pg31100.txt +roop pg3200.txt +roop. pg3200.txt +roos' pg3200.txt +roosevelt pg3200.txt +roosevelt, pg3200.txt +roost pg3200.txt +roosted pg3200.txt +rooster pg3200.txt +rooster, pg3200.txt +roosterish pg3200.txt +roosting pg3200.txt +root pg3200.txt, pg100.txt +root! pg100.txt +root, pg100.txt +root. pg100.txt +root; pg100.txt +root? pg100.txt +root] pg100.txt +rooted pg3200.txt, pg100.txt +rooted, pg3200.txt +roots pg100.txt +roots, pg3200.txt, pg100.txt +roots. pg100.txt +roots." pg3200.txt +roots; pg100.txt +rope pg3200.txt +rope! pg100.txt +rope!' pg3200.txt, pg100.txt +rope's-end pg100.txt +rope's-end.' pg100.txt +rope, pg3200.txt +rope--a pg3200.txt +rope--and pg3200.txt +rope--two pg3200.txt +rope-walk pg3200.txt +rope. pg3200.txt, pg100.txt +rope; pg3200.txt, pg100.txt +rope? pg100.txt +rope] pg3200.txt +roped pg3200.txt +ropery? pg100.txt +ropes pg3200.txt +ropes, pg3200.txt +roque, pg3200.txt +rosa pg3200.txt +rosa. pg3200.txt +rosalind pg100.txt +rosalind! pg100.txt +rosalind, pg100.txt +rosalind. pg100.txt +rosalind? pg100.txt +rosalinde. pg100.txt +rosalinde.' pg100.txt +rosaline pg100.txt +rosaline! pg100.txt +rosaline, pg100.txt +rosaline. pg100.txt +rosaline? pg100.txt +rosannah pg3200.txt +rosannah!" pg3200.txt +rosannah, pg3200.txt +rosannah." pg3200.txt +rosannah?" pg3200.txt +roscicrucian pg3200.txt +rose pg31100.txt, pg3200.txt, pg100.txt +rose, pg3200.txt, pg100.txt +rose--an pg3200.txt +rose--and pg3200.txt +rose--that pg3200.txt +rose-clad pg3200.txt +rose-leaves pg3200.txt +rose. pg3200.txt, pg100.txt +rose; pg3200.txt, pg100.txt +rose?" pg31100.txt +rose] pg100.txt +rosebud. pg3200.txt +rosemary pg100.txt +rosemary, pg100.txt +rosemary; pg100.txt +rosencrantz, pg100.txt +rosencrantz. pg100.txt +rosenthal pg3200.txt +roses pg3200.txt, pg100.txt +roses, pg3200.txt, pg100.txt +roses; pg31100.txt, pg100.txt +rosettes pg3200.txt +rosin pg3200.txt +rosings pg31100.txt +rosings, pg31100.txt +rosings. pg31100.txt +rosings." pg31100.txt +rosings?" pg31100.txt +ross, pg100.txt +ross. pg100.txt +rossm--er--" pg3200.txt +rossmore pg3200.txt +rossmore' pg3200.txt +rossmore, pg3200.txt +rossmore. pg3200.txt +rossmore." 
pg3200.txt +rossmore: pg3200.txt +roster. pg3200.txt +rostrum pg3200.txt +rosy pg3200.txt +rosy-morn!" pg3200.txt +rot pg3200.txt, pg100.txt +rot! pg100.txt +rot. pg3200.txt +rot." pg3200.txt +rot; pg3200.txt, pg100.txt +rot? pg100.txt +rote, pg100.txt +roted pg100.txt +rotorua pg3200.txt +rotten pg3200.txt, pg100.txt +rotten, pg100.txt +rotten. pg100.txt +rotten." pg3200.txt +rotten; pg3200.txt +rottenness! pg100.txt +rotterdam, pg3200.txt +rotting pg3200.txt, pg100.txt +rotulorum." pg3200.txt +rouen pg3200.txt, pg100.txt +rouen!" pg3200.txt +rouen, pg3200.txt, pg100.txt +rouen. pg3200.txt, pg100.txt +rouen; pg100.txt +rouge pg31100.txt +rouge, pg3200.txt +rouges pg31100.txt +rough pg31100.txt, pg3200.txt, pg100.txt +rough, pg3200.txt, pg100.txt +rough. pg3200.txt, pg100.txt +rough." pg3200.txt +rough; pg100.txt +rough? pg100.txt +rougher. pg3200.txt +roughest pg3200.txt +roughing pg3200.txt +roughly pg3200.txt, pg100.txt +roughly-shaped pg3200.txt +roughly: pg3200.txt +roughness pg3200.txt +roughs; pg3200.txt +roumania, pg3200.txt +roun' pg3200.txt +roun'bout pg3200.txt +round pg31100.txt, pg3200.txt, pg100.txt +round! pg100.txt +round!" pg31100.txt +round, pg31100.txt, pg3200.txt, pg100.txt +round," pg3200.txt +round. pg31100.txt, pg3200.txt, pg100.txt +round." pg31100.txt, pg3200.txt +round.' pg100.txt +round? pg3200.txt, pg100.txt +roundabout pg31100.txt, pg3200.txt +roundabout. pg3200.txt +rounded pg3200.txt, pg100.txt +rounded, pg3200.txt +rounder. pg100.txt +rounding pg3200.txt +rounding, pg100.txt +roundly. pg100.txt +rounds pg3200.txt +rounds, pg3200.txt +rous'd! pg100.txt +rouse pg31100.txt, pg3200.txt, pg100.txt +rouse, pg100.txt +rouse. pg100.txt +rouse: pg3200.txt +rouse; pg100.txt +roused pg31100.txt, pg3200.txt +roused, pg31100.txt +rousillon pg100.txt +rousillon, pg100.txt +rousillon. pg100.txt +rousillon; pg100.txt +rousillon? pg100.txt +rousing pg31100.txt, pg3200.txt +rousing, pg3200.txt +roust pg3200.txt +roustabouts pg3200.txt +rout pg3200.txt, pg100.txt +rout, pg100.txt +rout. pg100.txt +rout; pg100.txt +route pg3200.txt +route, pg31100.txt, pg3200.txt +route. pg3200.txt +route." pg3200.txt +route; pg3200.txt +routed pg3200.txt +routed, pg3200.txt +routes pg3200.txt +routes, pg3200.txt +routine pg3200.txt +routine, pg3200.txt +routs pg100.txt +routs, pg100.txt +rove pg100.txt +roving pg3200.txt +row pg31100.txt, pg3200.txt +row, pg3200.txt +row. pg3200.txt +row." pg3200.txt +row; pg3200.txt +rowboats, pg3200.txt +rowdies pg3200.txt +rowdy pg3200.txt +rowel pg100.txt +rowena pg3200.txt +rowena--" pg3200.txt +rowena. pg3200.txt +roweny"--handshake. pg3200.txt +rowing pg3200.txt +rowland pg100.txt +rowland, pg100.txt +rowling pg31100.txt +rows pg3200.txt +rows, pg3200.txt +rows--stacked pg3200.txt +roxana pg3200.txt +roxy pg3200.txt +roxy!" pg3200.txt +roxy's pg3200.txt +roxy, pg3200.txt +roxy. pg3200.txt +roxy." pg3200.txt +roxy; pg3200.txt +roxy?" pg3200.txt +roy pg3200.txt +roy. pg100.txt +royal pg3200.txt, pg100.txt +royal! pg100.txt +royal"-- pg3200.txt +royal, pg3200.txt, pg100.txt +royal- pg100.txt +royal--" pg3200.txt +royal. pg3200.txt, pg100.txt +royal." pg3200.txt +royal: pg100.txt +royal;' pg3200.txt +royalist, pg3200.txt +royally pg3200.txt, pg100.txt +royally! pg100.txt +royals pg3200.txt +royals. pg3200.txt +royalties pg3200.txt, pg100.txt +royalties, pg3200.txt +royalties. pg3200.txt +royalty pg3200.txt, pg100.txt +royalty, pg3200.txt, pg100.txt +royalty--maybe pg3200.txt +royalty. 
pg3200.txt, pg100.txt +royalty: pg3200.txt +royalty; pg3200.txt +royalty?" pg3200.txt +roystering, pg3200.txt +rub pg31100.txt, pg3200.txt +rub! pg100.txt +rub, pg100.txt +rubbage pg3200.txt +rubbage, pg3200.txt +rubbage-pile pg3200.txt +rubbed pg3200.txt +rubber; pg31100.txt +rubbers pg31100.txt +rubbing pg3200.txt +rubbish pg3200.txt +rubbish!" pg3200.txt +rubbish, pg3200.txt +rubbish--and pg3200.txt +rubbish-pile pg3200.txt +rubbish. pg3200.txt +rubbish; pg3200.txt +rubbish?" pg3200.txt +rubies pg3200.txt +rubies, pg100.txt +rubs pg3200.txt, pg100.txt +ruck pg3200.txt +rudder-blade. pg3200.txt +rudder-post!" pg3200.txt +rudder. pg100.txt +rudder?' pg3200.txt +ruddy pg3200.txt +ruddy, pg3200.txt +rude pg31100.txt, pg3200.txt +rude, pg3200.txt +rude. pg100.txt +rude; pg100.txt +rude? pg3200.txt +rude?" pg31100.txt +rudely pg3200.txt +rudeness pg100.txt +rudeness, pg31100.txt +rudeness." pg3200.txt +rudeness; pg31100.txt, pg3200.txt +rudest pg3200.txt +rudiments pg100.txt +rue pg3200.txt, pg100.txt +rue, pg100.txt +rue. pg100.txt +ruefully pg3200.txt +ruff pg3200.txt, pg100.txt +ruffian pg3200.txt, pg100.txt +ruffian, pg3200.txt, pg100.txt +ruffian--" pg3200.txt +ruffians pg3200.txt +ruffled pg3200.txt +ruffled, pg3200.txt +ruffler pg3200.txt +ruffler, pg3200.txt +ruffles pg3200.txt +ruffles. pg3200.txt +ruffles; pg3200.txt +rugby pg100.txt +rugby! pg100.txt +rugby, pg100.txt +rugby. pg100.txt +rugby? pg100.txt +rugby] pg100.txt +rugged pg3200.txt +rugged, pg100.txt +ruggednesses, pg3200.txt +ruin pg31100.txt, pg3200.txt, pg100.txt +ruin! pg3200.txt, pg100.txt +ruin!" pg3200.txt +ruin" pg3200.txt +ruin'd, pg100.txt +ruin's pg3200.txt +ruin, pg3200.txt, pg100.txt +ruin. pg3200.txt, pg100.txt +ruin." pg31100.txt, pg3200.txt +ruin; pg3200.txt +ruinate pg100.txt +ruined pg31100.txt, pg3200.txt +ruined!" pg3200.txt +ruined, pg3200.txt +ruined--i pg3200.txt +ruined. pg31100.txt, pg3200.txt +ruined." pg3200.txt +ruining? pg100.txt +ruinous pg3200.txt +ruinous? pg100.txt +ruinously pg3200.txt +ruins pg3200.txt, pg100.txt +ruins, pg3200.txt +ruins. pg3200.txt +ruins; pg3200.txt +rul'd pg100.txt +rul'd. pg100.txt +rul'd; pg100.txt +rule pg31100.txt, pg3200.txt, pg100.txt +rule, pg3200.txt, pg100.txt +rule," pg3200.txt +rule. pg31100.txt, pg3200.txt, pg100.txt +rule." pg3200.txt +rule.' pg3200.txt +rule; pg3200.txt, pg100.txt +rule? pg3200.txt, pg100.txt +rule?' pg3200.txt +ruled pg3200.txt +ruled. pg100.txt +ruler. pg100.txt +rulers, pg3200.txt +rulers. pg3200.txt +rules pg31100.txt, pg3200.txt +rules! pg3200.txt +rules, pg31100.txt, pg3200.txt +rules--it's pg3200.txt +rules." pg3200.txt +rules: pg3200.txt +ruling, pg3200.txt +rulings pg3200.txt +rum pg31100.txt, pg3200.txt +rum, pg3200.txt +rum. pg3200.txt +rumania pg3200.txt +rumbling pg3200.txt +rumblings pg3200.txt +ruminat- pg100.txt +ruminate pg100.txt +ruminated. pg100.txt +ruminating, pg3200.txt +rummage.' pg3200.txt +rummaging. pg3200.txt +rumor pg100.txt +rumored,' pg3200.txt +rumors pg3200.txt +rumour pg31100.txt, pg100.txt +rumour'd, pg100.txt +rumours pg3200.txt +rumours. pg100.txt +rump pg3200.txt +rumpled pg3200.txt +run pg31100.txt, pg3200.txt, pg100.txt +run! pg100.txt +run!" pg3200.txt +run'st pg100.txt +run, pg3200.txt, pg100.txt +run- pg100.txt +run. pg3200.txt, pg100.txt +run." pg3200.txt +run; pg100.txt +run? pg100.txt +runagates? pg100.txt +runaway pg3200.txt +runaway, pg3200.txt, pg100.txt +runaways, pg100.txt +runaways. pg100.txt +rung. pg100.txt +runlets pg3200.txt +runner. 
pg100.txt +running pg31100.txt, pg3200.txt, pg100.txt +running! pg100.txt +running, pg3200.txt, pg100.txt +running. pg3200.txt, pg100.txt +runs pg3200.txt, pg100.txt +runs, pg3200.txt, pg100.txt +rupee pg3200.txt +rupee; pg3200.txt +rupees pg3200.txt +rupees) pg3200.txt +rupture pg31100.txt, pg3200.txt +rupture:--edward pg31100.txt +rural pg3200.txt +ruralizing pg3200.txt +ruse pg3200.txt +rush pg3200.txt, pg100.txt +rush, pg3200.txt, pg100.txt +rush-candle, pg100.txt +rush. pg3200.txt +rush; pg3200.txt +rushed pg31100.txt, pg3200.txt +rushes pg3200.txt, pg100.txt +rushes! pg100.txt +rushes; pg100.txt +rushing pg31100.txt, pg3200.txt, pg100.txt +rushworth pg31100.txt +rushworth!" pg31100.txt +rushworth's pg31100.txt +rushworth, pg31100.txt +rushworth. pg31100.txt +rushworth." pg31100.txt +ruskin pg3200.txt +ruskin. pg3200.txt +russell pg31100.txt, pg3200.txt +russell's pg31100.txt +russell, pg31100.txt +russell-- pg3200.txt +russell. pg31100.txt, pg3200.txt +russell." pg31100.txt +russell; pg31100.txt +russia pg3200.txt +russia, pg3200.txt, pg100.txt +russia--gratitude pg3200.txt +russia. pg3200.txt +russia: pg3200.txt +russia? pg3200.txt +russia?" pg3200.txt +russian pg3200.txt, pg100.txt +russians pg3200.txt, pg100.txt +russians! pg100.txt +russians. pg3200.txt +russias pg3200.txt +rust pg3200.txt, pg100.txt +rust, pg100.txt +rusted pg3200.txt +rustic pg31100.txt +rustic's pg3200.txt +rustics pg3200.txt +rustle. pg100.txt +rustled pg3200.txt +rustler pg3200.txt +rustler, pg3200.txt +rustling pg3200.txt +rusts pg100.txt +rusty pg3200.txt +rusty, pg3200.txt +rusty; pg3200.txt +rusty? pg100.txt +ruth pg3200.txt, pg100.txt +ruth! pg100.txt +ruth's pg3200.txt +ruth, pg3200.txt +ruth. pg3200.txt +ruth?" pg3200.txt +ruther pg3200.txt +rutland pg100.txt +rutland- pg100.txt +rutland; pg100.txt +rutland? pg100.txt +rutli pg3200.txt +rye, pg100.txt +s'en pg3200.txt +s'i, pg3200.txt +s'i--" pg3200.txt +s'i." pg3200.txt +s'i? pg3200.txt +s'il pg3200.txt +s'pose pg3200.txt +s'prised pg3200.txt +s'r!" pg3200.txt +s,' pg31100.txt +s--on pg3200.txt +s. pg31100.txt, pg3200.txt, pg100.txt +s." pg3200.txt +s., pg3200.txt +s.l.c. pg3200.txt +s/he pg31100.txt, pg3200.txt, pg100.txt +sa! pg100.txt +saa, pg3200.txt +saba pg100.txt +saba, pg3200.txt +sabbath pg3200.txt +sabbath, pg3200.txt +sabbath," pg3200.txt +sabbath-day's pg3200.txt +sabbath-day." pg3200.txt +sabbath-school, pg3200.txt +sabbath. pg3200.txt +sabe pg3200.txt +saber; pg3200.txt +sabines, pg3200.txt +sable pg3200.txt +sable, pg3200.txt +sac--however, pg3200.txt +sack pg3200.txt, pg100.txt +sack!" pg3200.txt +sack, pg3200.txt, pg100.txt +sack--it pg3200.txt +sack. pg3200.txt, pg100.txt +sack; pg3200.txt +sack? pg100.txt +sack?" pg3200.txt +sackcloth pg3200.txt +sackingen, pg3200.txt +sacks pg31100.txt, pg3200.txt +sacks!" pg3200.txt +sacks--one pg3200.txt +sacks. pg3200.txt +sacrament pg100.txt +sacrament, pg100.txt +sacrament?" pg3200.txt +sacramento pg3200.txt +sacred pg31100.txt, pg3200.txt, pg100.txt +sacred, pg3200.txt +sacred; pg3200.txt +sacredness pg3200.txt +sacrifice pg31100.txt, pg3200.txt, pg100.txt +sacrifice! pg3200.txt, pg100.txt +sacrifice, pg3200.txt, pg100.txt +sacrifice. pg31100.txt, pg3200.txt, pg100.txt +sacrifice." pg31100.txt +sacrifice; pg31100.txt, pg100.txt +sacrifice] pg3200.txt +sacrificed pg3200.txt +sacrificers--sacred pg3200.txt +sacrifices pg31100.txt +sacrifices, pg31100.txt +sacrifices. 
pg31100.txt, pg3200.txt, pg100.txt +sacrificing pg31100.txt, pg3200.txt +sacrilege, pg3200.txt +sacrilegious pg3200.txt +sacrilegious, pg3200.txt +sacristy pg3200.txt +sad pg31100.txt, pg3200.txt, pg100.txt +sad! pg100.txt +sad, pg3200.txt, pg100.txt +sad- pg3200.txt, pg100.txt +sad. pg3200.txt, pg100.txt +sad.) pg3200.txt +sad: pg3200.txt +sad; pg3200.txt, pg100.txt +sad? pg100.txt +sad?" pg3200.txt +sadden pg3200.txt +saddened pg3200.txt +saddened. pg3200.txt +saddening pg3200.txt +sadder pg3200.txt +sadder. pg100.txt +saddest pg3200.txt +saddest. pg3200.txt +saddle pg3200.txt +saddle, pg3200.txt +saddle----" pg3200.txt +saddle--with pg3200.txt +saddle-bag. pg3200.txt +saddle-baggsed pg3200.txt +saddle-bags." pg3200.txt +saddle-girth pg3200.txt +saddle-horn, pg3200.txt +saddle. pg3200.txt +saddle; pg100.txt +saddled, pg3200.txt +saddles pg3200.txt +saddles, pg3200.txt +saddles. pg3200.txt +saddles] pg3200.txt +sadful. pg3200.txt +sadly pg31100.txt, pg3200.txt, pg100.txt +sadly. pg3200.txt +sadly." pg31100.txt +sadly? pg100.txt +sadness pg31100.txt, pg3200.txt +sadness. pg3200.txt, pg100.txt +saewulf, pg3200.txt +safe pg31100.txt, pg3200.txt, pg100.txt +safe! pg3200.txt, pg100.txt +safe!" pg3200.txt +safe, pg31100.txt, pg3200.txt, pg100.txt +safe--if pg3200.txt +safe--nobody pg3200.txt +safe--safe! pg3200.txt +safe--this pg3200.txt +safe-guarded pg3200.txt +safe. pg31100.txt, pg3200.txt, pg100.txt +safe." pg31100.txt, pg3200.txt +safe; pg31100.txt, pg3200.txt, pg100.txt +safe? pg3200.txt, pg100.txt +safe?" pg3200.txt +safeguard pg3200.txt, pg100.txt +safely pg31100.txt, pg3200.txt, pg100.txt +safely, pg31100.txt, pg100.txt +safer pg31100.txt, pg3200.txt, pg100.txt +safer, pg3200.txt +safer. pg100.txt +safest pg31100.txt, pg3200.txt +safest." pg3200.txt +safety pg31100.txt, pg3200.txt, pg100.txt +safety! pg3200.txt, pg100.txt +safety, pg31100.txt, pg100.txt +safety,- pg100.txt +safety- pg3200.txt +safety-razor. pg3200.txt +safety-valve!" pg3200.txt +safety-valve" pg3200.txt +safety. pg31100.txt, pg3200.txt, pg100.txt +safety." pg31100.txt, pg3200.txt +safety: pg100.txt +safety; pg31100.txt +safety] pg3200.txt +saffron pg3200.txt +sag.; pg3200.txt +sagacious pg3200.txt +sagacious, pg3200.txt +sagacity pg31100.txt, pg3200.txt +sagacity. pg31100.txt, pg3200.txt +sagamore. pg3200.txt +sage pg3200.txt +sage, pg3200.txt +sage-brush pg3200.txt +sage-brush, pg3200.txt +sage-brush; pg3200.txt +sage-bush pg3200.txt +sage-bush, pg3200.txt +sage-bush; pg3200.txt +sage. pg3200.txt +sages pg3200.txt +sagged pg3200.txt +sagittary pg100.txt +sagittary, pg100.txt +sagramor pg3200.txt +sagramor's pg3200.txt +sagramor, pg3200.txt +sah!" pg3200.txt +sah, pg3200.txt +sah,' pg3200.txt +sah." pg3200.txt +sah.' pg3200.txt +sahara pg3200.txt +sahara. pg3200.txt +saharas pg3200.txt +sahib's pg3200.txt +sahibs pg3200.txt +said! pg31100.txt, pg100.txt +said!" pg3200.txt +said) pg31100.txt +said), pg3200.txt +said, pg31100.txt, pg3200.txt, pg100.txt +said,-- pg31100.txt, pg3200.txt +said-- pg31100.txt, pg3200.txt +said--" pg3200.txt +said--"a pg3200.txt +said--"i pg3200.txt +said--' pg3200.txt +said--'gents, pg3200.txt +said---- pg3200.txt +said-----" pg3200.txt +said--] pg3200.txt +said--and pg3200.txt +said--delivered pg3200.txt +said--he pg3200.txt +said--to pg3200.txt +said. pg31100.txt, pg3200.txt, pg100.txt +said." pg31100.txt, pg3200.txt +said."--she pg31100.txt +said.' 
pg3200.txt +said: pg31100.txt, pg3200.txt, pg100.txt +said:-- pg31100.txt, pg3200.txt +said; pg31100.txt, pg3200.txt, pg100.txt +said;-- pg31100.txt +said? pg31100.txt, pg100.txt +said?" pg3200.txt +said] pg3200.txt +sail pg3200.txt, pg100.txt +sail! pg3200.txt, pg100.txt +sail!" pg100.txt +sail'd pg100.txt +sail, pg3200.txt, pg100.txt +sail- pg100.txt +sail--and pg3200.txt +sail-boat, pg3200.txt +sail. pg3200.txt, pg100.txt +sail.'" pg3200.txt +sail; pg100.txt +sailboats pg3200.txt +sailboats, pg3200.txt +sailed pg31100.txt, pg3200.txt +sailed," pg3200.txt +sailed--she pg3200.txt +sailed. pg3200.txt +sailed; pg3200.txt +sailin' pg3200.txt +sailing pg3200.txt, pg100.txt +sailing, pg3200.txt +sailing-ship. pg3200.txt +sailing-vessel pg3200.txt +sailing-vessel. pg3200.txt +sailor pg31100.txt, pg3200.txt +sailor, pg31100.txt, pg3200.txt +sailor-profanity pg3200.txt +sailor-talk. pg3200.txt +sailor. pg100.txt +sailor." pg31100.txt +sailors pg31100.txt, pg3200.txt, pg100.txt +sailors, pg3200.txt +sailors. pg31100.txt, pg3200.txt, pg100.txt +sailors." pg31100.txt +sailors: pg3200.txt +sailors; pg100.txt +sailors? pg100.txt +sails pg3200.txt, pg100.txt +sails, pg31100.txt, pg3200.txt, pg100.txt +sails. pg3200.txt +sails; pg100.txt +sain. pg100.txt +saint pg3200.txt, pg100.txt +saint, pg31100.txt, pg3200.txt, pg100.txt +saint. pg31100.txt, pg3200.txt, pg100.txt +saint; pg100.txt +saint? pg100.txt +sainte pg3200.txt +sainted, pg100.txt +saintlike pg100.txt +saintrailles pg3200.txt +saintrailles. pg3200.txt +saints pg3200.txt +saints' pg3200.txt +saints, pg3200.txt, pg100.txt +saints. pg3200.txt, pg100.txt +saints.* pg3200.txt +sair--just pg3200.txt +sairey pg3200.txt +saith pg3200.txt, pg100.txt +saith- pg100.txt +sake pg31100.txt, pg3200.txt, pg100.txt +sake! pg31100.txt, pg3200.txt, pg100.txt +sake!" pg31100.txt, pg3200.txt +sake) pg100.txt +sake, pg31100.txt, pg3200.txt, pg100.txt +sake--and pg31100.txt +sake--be pg31100.txt +sake. pg31100.txt, pg3200.txt, pg100.txt +sake." pg31100.txt, pg3200.txt +sake.' pg100.txt +sake: pg100.txt +sake; pg31100.txt, pg3200.txt, pg100.txt +sake? pg3200.txt, pg100.txt +sake?" pg31100.txt, pg3200.txt +sakes! pg3200.txt +sakes!" pg3200.txt +sakes, pg3200.txt +sakes." pg31100.txt +sakes; pg3200.txt +sakes?" pg3200.txt +sakhi pg3200.txt +sakka pg3200.txt +sala's. pg3200.txt +sala, pg100.txt +salaaming pg3200.txt +salable, pg3200.txt +salable. pg3200.txt +salad, pg3200.txt +salad-dressing. pg3200.txt +saladin pg3200.txt +salads, pg3200.txt +salamander pg3200.txt +salaried pg3200.txt +salaries pg3200.txt +salaries, pg3200.txt +salaries. pg3200.txt +salary pg31100.txt, pg3200.txt +salary, pg3200.txt +salary. pg3200.txt +salary.] pg3200.txt +sale pg3200.txt +sale, pg3200.txt +sale,' pg100.txt +sale. pg3200.txt +sale.] pg3200.txt +sale; pg3200.txt +salerio pg100.txt +salerio! pg100.txt +salerio, pg100.txt +salerio? pg100.txt +sales pg3200.txt +sales-stables pg3200.txt +salesman; pg3200.txt +salesmen pg3200.txt +salisbury pg100.txt +salisbury! pg100.txt +salisbury) pg100.txt +salisbury, pg100.txt +salisbury. pg100.txt +salisbury; pg100.txt +salisbury? pg100.txt +sall pg100.txt +salle pg3200.txt +sallet, pg100.txt +sallied pg3200.txt +sallow pg3200.txt +sally pg31100.txt, pg3200.txt +sally!" pg3200.txt +sally--" pg3200.txt +sally. pg3200.txt +sally." pg3200.txt +sally?" pg3200.txt +sally]. pg3200.txt +salons pg3200.txt +saloon pg3200.txt +saloon, pg31100.txt, pg3200.txt +saloon-keeper pg3200.txt +saloon. 
pg3200.txt +saloons pg3200.txt +salt pg3200.txt, pg100.txt +salt, pg3200.txt, pg100.txt +salt-cellar: pg3200.txt +salt-cellars; pg3200.txt +salt-cod pg3200.txt +salt. pg3200.txt +salt; pg3200.txt, pg100.txt +salt?" pg3200.txt +salthaven pg3200.txt +salthaven. pg3200.txt +saltire pg3200.txt +saltmarsh pg3200.txt +salutation pg3200.txt +salutation, pg3200.txt +salutation. pg100.txt +salutation: pg3200.txt +salutations; pg100.txt +salute pg3200.txt, pg100.txt +salute, pg3200.txt +salute. pg3200.txt +salute] pg100.txt +saluted pg3200.txt +saluted, pg3200.txt +saluted. pg3200.txt +salutes pg3200.txt +salutes; pg3200.txt +saluteth pg100.txt +saluting: pg3200.txt +salvation pg3200.txt +salvation, pg3200.txt, pg100.txt +salvation-notions pg3200.txt +salvation. pg3200.txt +salvation." pg3200.txt +salvation? pg100.txt +salve pg3200.txt, pg100.txt +salve? pg100.txt +salver. pg3200.txt +salvo pg3200.txt +sam pg3200.txt +sam" pg3200.txt +sam," pg3200.txt +sam. pg3200.txt +samaritan' pg3200.txt +same pg31100.txt, pg3200.txt, pg100.txt +same! pg100.txt +same, pg31100.txt, pg3200.txt, pg100.txt +same," pg31100.txt +same- pg100.txt +same-- pg3200.txt +same--" pg3200.txt +same--' pg3200.txt +same--because, pg3200.txt +same--labyrinth pg3200.txt +same--old, pg3200.txt +same--they pg3200.txt +same--thirteen pg3200.txt +same. pg31100.txt, pg3200.txt, pg100.txt +same." pg31100.txt, pg3200.txt +same.] pg3200.txt +same; pg31100.txt, pg3200.txt, pg100.txt +same? pg3200.txt, pg100.txt +same?" pg31100.txt, pg3200.txt +sameness pg3200.txt +sameness. pg3200.txt +samingo. pg100.txt +saml. pg3200.txt +sammy pg3200.txt +samoa pg3200.txt +samoa, pg3200.txt +sample pg3200.txt +sample. pg3200.txt +sample: pg3200.txt +sample; pg3200.txt +sampled pg3200.txt +samples pg3200.txt +samples. pg3200.txt +samples: pg3200.txt +sampling pg3200.txt +sampson pg100.txt +samson pg3200.txt +samuel pg3200.txt +samuel. pg3200.txt +samuel." pg3200.txt +san pg3200.txt +san'." pg3200.txt +sanctified pg3200.txt, pg100.txt +sanctified, pg100.txt +sanctified. pg3200.txt, pg100.txt +sanctify. pg100.txt +sanctimonies, pg100.txt +sanctimonious pg3200.txt +sanctimony pg100.txt +sanction pg31100.txt +sanctioned. pg3200.txt +sanctioned.' pg31100.txt +sanctity pg3200.txt +sanctity? pg100.txt +sanctuaries pg3200.txt +sanctuarize; pg100.txt +sanctuary pg3200.txt, pg100.txt +sanctuary, pg100.txt +sanctuary. pg100.txt +sand pg3200.txt +sand, pg3200.txt, pg100.txt +sand,) pg3200.txt +sand-bags." pg3200.txt +sand-bar pg3200.txt +sand-bars pg3200.txt +sand-beaches; pg3200.txt +sand-belt! pg3200.txt +sand-quarry." pg3200.txt +sand-storm pg3200.txt +sand. pg3200.txt +sand?" pg3200.txt +sandbag, pg100.txt +sandbar pg3200.txt +sandbar. pg3200.txt +sands pg3200.txt, pg100.txt +sands, pg31100.txt, pg100.txt +sands. pg100.txt +sands; pg100.txt +sandstone pg3200.txt +sandstone, pg3200.txt +sandstone." pg3200.txt +sandwich pg3200.txt +sandwich, pg3200.txt +sandwich. pg3200.txt +sandwiches pg3200.txt +sandwiches, pg31100.txt +sandwiches?" pg3200.txt +sandy pg3200.txt +sandy's pg3200.txt +sandy, pg3200.txt +sandy-haired. pg31100.txt +sandy. pg3200.txt +sandy." pg3200.txt +sandy?" pg3200.txt +sandy?..." pg3200.txt +sandys pg100.txt +sandys, pg100.txt +sandys. pg100.txt +sandys; pg100.txt +sane pg3200.txt +sane, pg3200.txt +sane. pg3200.txt +sane; pg3200.txt +sanely pg3200.txt +saner pg3200.txt +sanfrancisco. pg3200.txt +sang pg31100.txt, pg3200.txt +sang-froid, pg31100.txt +sang. 
pg3200.txt, pg100.txt +sang: pg3200.txt +sang; pg3200.txt +sanguine pg31100.txt, pg100.txt +sanguine, pg31100.txt +sanguine." pg31100.txt +sanguinely pg31100.txt +sanhedrin. pg3200.txt +sanhedrin; pg3200.txt +sanity pg3200.txt +sanity. pg3200.txt +sanity." pg3200.txt +sank pg3200.txt +sank, pg3200.txt +sans pg3200.txt +santrailles; pg100.txt +sap pg3200.txt +sap, pg100.txt +sap? pg100.txt +saphead." pg3200.txt +sapheads." pg3200.txt +sapling, pg3200.txt +sapped pg3200.txt +sapping pg3200.txt +sappy pg3200.txt +sara- pg3200.txt +saracens, pg3200.txt +saracens. pg3200.txt +saracens; pg100.txt +sarah pg3200.txt +saratoga, pg3200.txt +sarcasm pg3200.txt +sarcasm, pg3200.txt +sarcasm. pg3200.txt +sarcasm." pg3200.txt +sarcasm: pg3200.txt +sarcasms, pg3200.txt +sarcasms--the pg3200.txt +sarcasms. pg3200.txt +sarcastic pg3200.txt +sarcastic. pg3200.txt +sarcastic: pg3200.txt +sarcastically-- pg31100.txt +sarcastically: pg3200.txt +sardanapalus, pg3200.txt +sardians, pg100.txt +sardines. pg3200.txt +sardinia. pg3200.txt +sardinia; pg100.txt +sargent.'" pg3200.txt +sarony pg3200.txt +sarsaparilla." pg3200.txt +sarto--" pg3200.txt +sarto?" pg3200.txt +sash pg3200.txt +sash, pg3200.txt +sash. pg3200.txt +sashed pg31100.txt +sashes pg3200.txt +sass pg3200.txt +sassoferrato." pg3200.txt +sat pg31100.txt, pg3200.txt, pg100.txt +sat, pg31100.txt, pg3200.txt, pg100.txt +sat." pg3200.txt +sat? pg100.txt +satan pg3200.txt +satan's pg3200.txt +satan, pg3200.txt +satan. pg3200.txt, pg100.txt +satan." pg3200.txt +satan; pg3200.txt +satan? pg100.txt +satan?" pg3200.txt +satchel pg3200.txt, pg100.txt +satchels pg3200.txt +satchels, pg3200.txt +sate pg3200.txt +satellites pg3200.txt +satin pg31100.txt +satin, pg3200.txt +satins pg3200.txt +satins, pg3200.txt +satire pg3200.txt, pg100.txt +satire, pg3200.txt +satire. pg3200.txt +satirist pg3200.txt +satisfaction pg31100.txt, pg3200.txt, pg100.txt +satisfaction, pg31100.txt, pg3200.txt, pg100.txt +satisfaction-- pg3200.txt +satisfaction--her pg31100.txt +satisfaction. pg31100.txt, pg3200.txt, pg100.txt +satisfaction." pg3200.txt +satisfaction.--he pg31100.txt +satisfaction: pg31100.txt, pg3200.txt +satisfaction; pg3200.txt, pg100.txt +satisfaction? pg100.txt +satisfactions pg31100.txt +satisfactorily pg3200.txt +satisfactorily, pg3200.txt +satisfactorily. pg3200.txt +satisfactory pg31100.txt, pg3200.txt +satisfactory, pg3200.txt +satisfactory-- pg3200.txt +satisfactory. pg3200.txt +satisfactory." pg31100.txt, pg3200.txt +satisfactory: pg3200.txt +satisfactory?" pg3200.txt +satisfait. pg3200.txt +satisfied pg31100.txt, pg3200.txt, pg100.txt +satisfied! pg100.txt +satisfied, pg31100.txt, pg3200.txt, pg100.txt +satisfied--although pg3200.txt +satisfied--he pg3200.txt +satisfied--satisfied pg3200.txt +satisfied--so pg31100.txt +satisfied--unaccountable pg31100.txt +satisfied-looking pg3200.txt +satisfied-whether pg100.txt +satisfied. pg31100.txt, pg3200.txt, pg100.txt +satisfied." pg31100.txt +satisfied; pg3200.txt, pg100.txt +satisfied? pg100.txt +satisfied?" pg3200.txt +satisfies pg31100.txt +satisfy pg31100.txt, pg3200.txt, pg100.txt +satisfy! pg100.txt +satisfy" pg3200.txt +satisfy, pg100.txt +satisfy. pg3200.txt, pg100.txt +satisfy." pg31100.txt +satisfying pg31100.txt, pg3200.txt +satisfying; pg31100.txt +satt? pg3200.txt +sattin pg31100.txt +saturated pg3200.txt +saturday pg31100.txt, pg3200.txt +saturday, pg3200.txt +saturday," pg3200.txt +saturday--and pg31100.txt +saturday. pg3200.txt +saturday." pg31100.txt, pg3200.txt +saturday; pg3200.txt +saturday?" 
pg3200.txt +saturnine! pg100.txt +saturnine!' pg100.txt +saturnine, pg100.txt +saturnine. pg100.txt +saturnine: pg100.txt +saturninus pg100.txt +saturninus, pg100.txt +saturninus. pg100.txt +satyrs pg100.txt +sau- pg3200.txt +sauce pg3200.txt, pg100.txt +sauce-box?" pg3200.txt +sauce. pg100.txt +sauced pg100.txt +saucer pg3200.txt +saucer, pg3200.txt +sauces, pg3200.txt +sauciness; pg100.txt +saucy, pg100.txt +saucy. pg100.txt +saucy." pg31100.txt +saucy? pg100.txt +saul pg3200.txt +saul, pg3200.txt +sauna, pg3200.txt +sauntering pg3200.txt +sausage-stuffing," pg3200.txt +sausages pg3200.txt +sausages, pg3200.txt +sausages. pg3200.txt +saussure, pg3200.txt +saussure. pg3200.txt +saute pg3200.txt +sauter pg3200.txt +sauter) pg3200.txt +sautez! pg3200.txt +sav'd pg100.txt +sav'd! pg100.txt +sav'd, pg100.txt +sav'd? pg100.txt +savage pg3200.txt, pg100.txt +savage, pg100.txt +savage-looking pg3200.txt +savage-wild, pg100.txt +savage. pg3200.txt +savage; pg100.txt +savagely pg3200.txt +savagely: pg3200.txt +savageness pg100.txt +savagery pg3200.txt +savagery, pg3200.txt +savagery; pg3200.txt, pg100.txt +savages pg3200.txt +savages. pg3200.txt +savages.' pg3200.txt +savages; pg3200.txt +savages?" pg3200.txt +savants pg3200.txt +save pg31100.txt, pg3200.txt, pg100.txt +save, pg3200.txt, pg100.txt +save; pg3200.txt +saved pg31100.txt, pg3200.txt, pg100.txt +saved!" pg3200.txt +saved, pg3200.txt +saved--now pg3200.txt +saved. pg3200.txt, pg100.txt +saved." pg3200.txt +saved] pg3200.txt +saves pg3200.txt, pg100.txt +saves, pg31100.txt +saving pg31100.txt, pg3200.txt, pg100.txt +saving. pg3200.txt +saving." pg3200.txt +saving?" pg3200.txt +savings. pg3200.txt +savior, pg3200.txt +saviour pg3200.txt +saviour's pg3200.txt +saviour, pg3200.txt +saviour. pg3200.txt +saviour." pg3200.txt +savor pg3200.txt +savors pg3200.txt +savory pg3200.txt +savour pg100.txt +savour, pg100.txt +savours pg100.txt +savours. pg100.txt +savoury pg3200.txt +savoury, pg100.txt +savoy. pg3200.txt +saw pg31100.txt, pg3200.txt, pg100.txt +saw!" pg3200.txt +saw'st pg100.txt +saw't. pg100.txt +saw, pg31100.txt, pg3200.txt, pg100.txt +saw," pg31100.txt +saw- pg3200.txt, pg100.txt +saw--" pg3200.txt +saw--that pg3200.txt +saw-log. pg3200.txt +saw-saw- pg3200.txt +saw. pg31100.txt, pg3200.txt, pg100.txt +saw." pg31100.txt, pg3200.txt +saw.' pg3200.txt +saw: pg3200.txt +saw; pg31100.txt, pg100.txt +saw? pg100.txt +saw?" pg31100.txt, pg3200.txt +sawdust, pg3200.txt +sawed pg3200.txt +sawest pg100.txt +sawest. pg100.txt +sawing pg3200.txt +sawlsberry pg3200.txt +sawmill. pg3200.txt +sawn pg3200.txt +sawn. pg100.txt +saws pg3200.txt +saws. pg100.txt +sawyer pg3200.txt +sawyer!" pg3200.txt +sawyer, pg3200.txt +sawyer--'" pg3200.txt +sawyer--sir." pg3200.txt +sawyer. pg3200.txt +sawyer." pg3200.txt +sawyer.' pg3200.txt +sawyer?" pg3200.txt +saxon pg3200.txt +saxon. pg3200.txt +saxons pg3200.txt +saxons, pg100.txt +saxony pg3200.txt +saxony's pg100.txt +say pg31100.txt, pg3200.txt, pg100.txt +say! pg31100.txt, pg100.txt +say!" pg3200.txt +say" pg3200.txt +say"--but pg3200.txt +say'st pg100.txt +say'st, pg100.txt +say'st? pg100.txt +say'th, pg3200.txt +say) pg3200.txt, pg100.txt +say), pg31100.txt +say, pg31100.txt, pg3200.txt, pg100.txt +say,-- pg3200.txt +say,] pg3200.txt +say- pg100.txt +say-- pg31100.txt, pg3200.txt +say--" pg3200.txt +say--' pg3200.txt +say--but pg31100.txt +say--except pg31100.txt +say--fourteen pg31100.txt +say--he pg3200.txt +say--hey?" 
pg3200.txt +say--let pg3200.txt +say--listening, pg3200.txt +say--of pg31100.txt +say--she pg31100.txt +say--so pg3200.txt +say--the pg3200.txt +say--what pg3200.txt +say--where pg3200.txt +say-so's, pg3200.txt +say-so. pg3200.txt +say. pg31100.txt, pg3200.txt, pg100.txt +say." pg31100.txt, pg3200.txt +say: pg31100.txt, pg3200.txt, pg100.txt +say:-- pg3200.txt +say; pg31100.txt, pg3200.txt, pg100.txt +say? pg31100.txt, pg3200.txt, pg100.txt +say?" pg31100.txt, pg3200.txt +say?' pg3200.txt +say?- pg100.txt +say?--and pg3200.txt +sayers pg3200.txt +sayin' pg3200.txt +saying pg31100.txt, pg3200.txt, pg100.txt +saying! pg100.txt +saying) pg3200.txt +saying, pg31100.txt, pg3200.txt, pg100.txt +saying,-- pg31100.txt, pg3200.txt +saying-- pg31100.txt, pg3200.txt +saying--" pg3200.txt +saying. pg31100.txt, pg3200.txt, pg100.txt +saying." pg31100.txt, pg3200.txt +saying: pg31100.txt, pg3200.txt +saying:] pg3200.txt +saying; pg3200.txt +saying? pg100.txt +sayings pg3200.txt +sayings, pg3200.txt +sayings. pg100.txt +says pg31100.txt, pg3200.txt, pg100.txt +says! pg3200.txt +says" pg3200.txt +says, pg31100.txt, pg3200.txt, pg100.txt +says,-- pg3200.txt +says- pg100.txt +says-- pg3200.txt +says--" pg3200.txt +says--' pg3200.txt +says--anglice. pg3200.txt +says--he pg3200.txt +says. pg31100.txt, pg3200.txt, pg100.txt +says." pg31100.txt, pg3200.txt +says.' pg3200.txt +says.'" pg31100.txt +says.) pg3200.txt +says: pg3200.txt, pg100.txt +says:-- pg3200.txt +says:] pg3200.txt +says; pg31100.txt, pg3200.txt +says?" pg31100.txt +sc_1 pg100.txt +sc_2 pg100.txt +scab pg100.txt +scab! pg100.txt +scab. pg100.txt +scabbard, pg3200.txt +scabbard," pg3200.txt +scabbard. pg3200.txt +scabbard; pg3200.txt +scabbard? pg3200.txt +scabs? pg100.txt +scaffold pg3200.txt +scaffold?" pg3200.txt +scaffoldage- pg100.txt +scaffolding, pg3200.txt +scaffoldings pg3200.txt +scaffolds, pg3200.txt +scala, pg3200.txt +scald pg3200.txt +scalded pg3200.txt +scale pg31100.txt, pg3200.txt, pg100.txt +scale, pg100.txt +scale. pg3200.txt +scales pg3200.txt, pg100.txt +scales, pg100.txt +scales. pg3200.txt +scales.] pg3200.txt +scaling-ladders pg100.txt +scaling-ladders. pg3200.txt +scalloped pg3200.txt +scalp pg100.txt +scalp-lock pg3200.txt +scalp. pg3200.txt +scalped pg3200.txt +scalped. pg3200.txt +scalps pg100.txt +scamander; pg3200.txt +scamper pg3200.txt +scamper! pg3200.txt +scampered pg3200.txt +scampering pg31100.txt, pg3200.txt +scandal pg31100.txt, pg3200.txt +scandal, pg31100.txt, pg3200.txt +scandal--for pg3200.txt +scandal. pg100.txt +scandal? pg3200.txt +scandaliz'd. pg100.txt +scandalized pg3200.txt +scandalous pg31100.txt, pg3200.txt +scandalous, pg3200.txt +scandalous. pg3200.txt +scandalous." pg3200.txt +scandalous; pg3200.txt +scandalously pg31100.txt +scandals pg31100.txt +scann'd, pg100.txt +scann'd. pg100.txt +scanned pg3200.txt +scanning pg3200.txt +scant pg3200.txt, pg100.txt +scanted, pg100.txt +scantily pg31100.txt, pg3200.txt +scanting pg100.txt +scantling pg100.txt +scanty pg3200.txt +scap'd! pg100.txt +scap'd, pg100.txt +scape, pg100.txt +scape- pg100.txt +scape. pg100.txt +scape; pg100.txt +scape? pg100.txt +scar pg3200.txt +scar'd pg100.txt +scar, pg100.txt +scarborough, pg31100.txt +scarce pg3200.txt, pg100.txt +scarce, pg100.txt +scarce--so pg3200.txt +scarce. pg3200.txt +scarce." pg31100.txt +scarcely pg31100.txt, pg3200.txt, pg100.txt +scarcely, pg3200.txt +scarcity pg3200.txt +scare pg3200.txt +scare--what pg3200.txt +scare. 
pg3200.txt +scare: pg3200.txt +scarecrow, pg3200.txt +scared pg3200.txt +scared, pg3200.txt +scared. pg3200.txt +scared.) pg3200.txt +scared: pg3200.txt +scared; pg3200.txt +scarf pg100.txt +scarf! pg100.txt +scarf, pg100.txt +scarf; pg100.txt +scarf? pg100.txt +scarlet pg3200.txt +scarlet, pg3200.txt, pg100.txt +scarlet; pg100.txt +scarre pg100.txt +scarred pg3200.txt +scarred, pg3200.txt +scars pg100.txt +scars, pg100.txt +scars. pg3200.txt +scarus pg100.txt +scary. pg3200.txt +scasely. pg3200.txt +scath, pg100.txt +scathe pg100.txt +scatter pg3200.txt, pg100.txt +scatter!" pg3200.txt +scatter'd, pg100.txt +scatter'd; pg100.txt +scattered pg31100.txt, pg3200.txt, pg100.txt +scattered, pg31100.txt, pg3200.txt +scattering pg3200.txt +scattering. pg3200.txt +scatteringly pg3200.txt +scatters pg100.txt +scavenger." pg3200.txt +scene pg31100.txt, pg3200.txt, pg100.txt +scene! pg100.txt +scene, pg31100.txt, pg3200.txt, pg100.txt +scene-painter pg31100.txt +scene. pg31100.txt, pg3200.txt, pg100.txt +scene." pg31100.txt, pg3200.txt +scene.' pg3200.txt +scene.--messina. pg100.txt +scene: pg3200.txt, pg100.txt +scene; pg31100.txt +scenery pg31100.txt, pg3200.txt +scenery! pg3200.txt +scenery, pg3200.txt +scenery--like pg3200.txt +scenery. pg3200.txt +scenery." pg31100.txt, pg3200.txt +scenery; pg3200.txt +scenes pg31100.txt, pg3200.txt +scenes, pg31100.txt, pg3200.txt, pg100.txt +scenes. pg3200.txt +scenes; pg31100.txt +scenic pg3200.txt +scent pg3200.txt +scent, pg3200.txt +scent. pg3200.txt, pg100.txt +scent." pg3200.txt +scent; pg100.txt +scented pg3200.txt +scents pg3200.txt +scepter. pg3200.txt +sceptered pg3200.txt +scepticism. pg31100.txt +sceptre pg3200.txt, pg100.txt +sceptre, pg100.txt +sceptre. pg100.txt +sceptre? pg100.txt +sceptred pg3200.txt +sceptres pg100.txt +sceptres. pg100.txt +schau' pg3200.txt +schedule, pg100.txt +schedule. pg100.txt +scheherazade, pg3200.txt +scheideck pg3200.txt +scheme pg31100.txt, pg3200.txt +scheme, pg31100.txt, pg3200.txt +scheme--the pg3200.txt +scheme. pg31100.txt, pg3200.txt +scheme." pg31100.txt +scheme.--as pg31100.txt +scheme; pg31100.txt, pg3200.txt +scheme? pg31100.txt +scheme?" pg31100.txt, pg3200.txt +schemed pg3200.txt +schemes pg31100.txt, pg3200.txt +schemes, pg31100.txt, pg3200.txt +schemes--failures, pg3200.txt +schemes. pg31100.txt, pg3200.txt +scheming pg31100.txt, pg3200.txt +scheming, pg3200.txt +schenck pg3200.txt +schiller pg3200.txt +schlacht? pg3200.txt +schlag pg3200.txt +schlag, pg3200.txt +schmul pg3200.txt +schnattern pg3200.txt +schnattern; pg3200.txt +schofield, pg3200.txt +scholar pg31100.txt, pg3200.txt, pg100.txt +scholar, pg3200.txt +scholar. pg3200.txt, pg100.txt +scholar." pg31100.txt, pg3200.txt +scholar?' pg3200.txt +scholarly pg3200.txt, pg100.txt +scholars pg3200.txt +scholars, pg3200.txt +scholars. pg3200.txt +scholars." pg3200.txt +schon pg3200.txt +schon! pg3200.txt +schon!--" pg3200.txt +schon, pg3200.txt +school pg31100.txt, pg3200.txt, pg100.txt +school! pg3200.txt +school'd pg100.txt +school'd. pg100.txt +school's pg3200.txt +school, pg31100.txt, pg3200.txt, pg100.txt +school- pg3200.txt +school-- pg3200.txt +school--ah, pg3200.txt +school--what pg3200.txt +school-boy pg3200.txt +school-boy. pg3200.txt +school-girl's pg31100.txt +school-girls. pg3200.txt +school-room pg3200.txt +school-slate pg3200.txt +school. pg3200.txt, pg100.txt +school." pg3200.txt +school; pg3200.txt, pg100.txt +school?" pg3200.txt +schoolboys; pg3200.txt +schoolfellows, pg100.txt +schoolhouse pg3200.txt +schoolhouse. 
pg3200.txt +schoolmaster pg100.txt +schoolmaster, pg3200.txt, pg100.txt +schoolmaster. pg100.txt +schoolmaster: pg100.txt +schoolmate, pg3200.txt +schoolroom, pg31100.txt +schools pg3200.txt +schools), pg3200.txt +schools, pg3200.txt, pg100.txt +schools. pg3200.txt +schools: pg3200.txt +schooner pg3200.txt +schooner.) pg3200.txt +schoonmaker pg3200.txt +schottische. pg3200.txt +schreckhorn pg3200.txt +schreiben-- pg3200.txt +schreiner's pg3200.txt +schuyler, pg3200.txt +schwarenbach pg3200.txt +schweigst. pg3200.txt +schweitzerhof pg3200.txt +schweitzerhof." pg3200.txt +schwiegervater? pg3200.txt +sciatica, pg100.txt +sciatica? pg100.txt +sciaticas, pg100.txt +science pg3200.txt, pg100.txt +science!" pg3200.txt +science"!' pg3200.txt +science's pg3200.txt +science) pg3200.txt +science, pg3200.txt +science--all pg3200.txt +science-healing pg3200.txt +science. pg3200.txt +science." pg3200.txt +science.' pg3200.txt +science: pg3200.txt +science; pg3200.txt +sciences pg3200.txt +sciences, pg100.txt +scientiarum. pg3200.txt +scientific pg3200.txt +scientific--and pg3200.txt +scientific. pg3200.txt +scientifically pg3200.txt +scientist pg3200.txt +scientist's pg3200.txt +scientist's? pg3200.txt +scientist, pg3200.txt +scientist-church pg3200.txt +scientist. pg3200.txt +scientist." pg3200.txt +scientists pg3200.txt +scientists, pg3200.txt +scientists. pg3200.txt +scimitar, pg100.txt +scintillating pg3200.txt +sciolist's pg3200.txt +scion pg3200.txt +scion. pg100.txt +scipio pg3200.txt +scissors." pg3200.txt +scoff! pg100.txt +scoff. pg3200.txt +scoffed pg3200.txt +scoffer. pg100.txt +scoffing, pg3200.txt +scoffingly; pg3200.txt +scoffs pg100.txt +scoffs. pg100.txt +scoffs." pg3200.txt +scold pg31100.txt, pg3200.txt, pg100.txt +scold. pg100.txt +scold; pg100.txt +scolding pg3200.txt, pg100.txt +scolding, pg3200.txt +scolding. pg31100.txt +scolding? pg3200.txt +scollard. pg3200.txt +sconce. pg100.txt +sconce? pg100.txt +scone pg100.txt +scone. pg100.txt +scone? pg100.txt +scoop pg3200.txt +scooped pg3200.txt +scooping pg3200.txt +scooting pg3200.txt +scope pg31100.txt, pg3200.txt, pg100.txt +scope, pg100.txt +scope. pg100.txt +scope; pg100.txt +scorchingly pg3200.txt +score pg31100.txt, pg3200.txt, pg100.txt +score, pg3200.txt, pg100.txt +score. pg31100.txt, pg3200.txt, pg100.txt +score; pg31100.txt +score? pg3200.txt +scored pg3200.txt +scored. pg3200.txt +scores pg31100.txt, pg3200.txt +scoresby pg3200.txt +scorn pg31100.txt, pg3200.txt, pg100.txt +scorn'd? pg100.txt +scorn, pg100.txt +scorn. pg3200.txt, pg100.txt +scorn." pg31100.txt +scorn.] pg3200.txt +scorn: pg3200.txt +scorn; pg100.txt +scorn? pg100.txt +scorned pg3200.txt, pg100.txt +scorned. pg31100.txt +scorned? pg3200.txt +scornful pg3200.txt +scornful: pg3200.txt +scornfully! pg31100.txt +scornfully: pg3200.txt +scornfully; pg100.txt +scorns pg100.txt +scorns! pg100.txt +scorns, pg100.txt +scot pg3200.txt, pg100.txt +scot, pg100.txt +scot-free pg3200.txt +scot. pg100.txt +scot; pg100.txt +scotch pg31100.txt, pg3200.txt +scotch, pg31100.txt +scotch. pg3200.txt +scotchman pg3200.txt +scotia pg3200.txt +scotland pg31100.txt, pg100.txt +scotland! pg100.txt +scotland), pg3200.txt +scotland, pg31100.txt, pg3200.txt, pg100.txt +scotland. pg31100.txt +scotland." pg31100.txt +scotland: pg3200.txt +scotland; pg31100.txt +scotland? pg100.txt +scotland?" pg3200.txt +scots pg100.txt +scots, pg3200.txt, pg100.txt +scott pg31100.txt, pg3200.txt +scott! pg3200.txt +scott!" pg3200.txt +scott!"' pg3200.txt +scott!' 
pg3200.txt +scott, pg3200.txt +scott--" pg3200.txt +scott. pg3200.txt +scott; pg31100.txt +scott?" pg3200.txt +scottish pg3200.txt +scotty: pg3200.txt +scoundrel pg31100.txt, pg3200.txt +scoundrel!" pg3200.txt +scoundrelly. pg3200.txt +scoundrels pg3200.txt +scoundrels, pg3200.txt +scour pg3200.txt, pg100.txt +scour'd. pg100.txt +scour.' pg100.txt +scoured pg3200.txt +scoured. pg3200.txt +scourge pg3200.txt, pg100.txt +scourge! pg100.txt +scourge, pg100.txt +scourge. pg100.txt +scourgings pg3200.txt +scouring pg100.txt +scout pg100.txt +scout, pg3200.txt +scout; pg3200.txt +scouted pg3200.txt +scouting pg3200.txt +scouts. pg3200.txt +scouts?" pg3200.txt +scow pg3200.txt +scow. pg3200.txt +scowled pg3200.txt +scowling pg3200.txt +scrabblehorn; pg3200.txt +scraggy pg3200.txt +scramble pg3200.txt +scrambled pg3200.txt +scrambling pg3200.txt +scrap pg3200.txt +scrap-book pg3200.txt +scrap-booked pg3200.txt +scrapbook." pg3200.txt +scrape pg3200.txt, pg100.txt +scrape, pg31100.txt, pg3200.txt +scrape. pg3200.txt +scrape?" pg3200.txt +scraped pg3200.txt +scrapes." pg31100.txt +scrapes? pg3200.txt +scrapin' pg3200.txt +scraping pg3200.txt +scraps pg31100.txt, pg3200.txt +scraps. pg3200.txt, pg100.txt +scratch pg3200.txt, pg100.txt +scratch'd. pg100.txt +scratch. pg3200.txt, pg100.txt +scratched pg3200.txt +scratches pg3200.txt +scratching pg31100.txt, pg3200.txt +scrawls--but pg31100.txt +screaking pg3200.txt +scream: pg3200.txt +screamed pg31100.txt, pg3200.txt +screamer, pg3200.txt +screaming pg3200.txt +screamings pg3200.txt +screech pg3200.txt +screeched pg3200.txt +screeching pg3200.txt +screen pg31100.txt, pg3200.txt +screen. pg31100.txt +screened pg31100.txt +screening pg3200.txt +screens, pg31100.txt +screw pg3200.txt +screw, pg3200.txt +screw." pg3200.txt +screwdriver pg3200.txt +screwdriver--just pg3200.txt +screwdriver." pg3200.txt +screwdriver? pg3200.txt +screwdriver?" pg3200.txt +scribble pg3200.txt +scribbler pg3200.txt +scribblers pg3200.txt +scribbling pg3200.txt +scribbling, pg3200.txt +scribe. pg100.txt +scribes pg3200.txt +scribes, pg100.txt +scribes. pg100.txt +scribes; pg3200.txt +scribner's pg3200.txt +scrimmage pg3200.txt +scrimp pg3200.txt +scrimped pg3200.txt +scrimping pg3200.txt +scrip pg3200.txt +scrip, pg3200.txt +scrip. pg100.txt +scrippage. pg100.txt +scriptural pg3200.txt +scriptural. pg3200.txt +scriptural." pg3200.txt +scripture, pg100.txt +scripture: pg3200.txt +scripture? pg100.txt +scriptures pg3200.txt +scriptures, pg3200.txt +scriptures. pg3200.txt +scriptures." pg3200.txt +scriptures; pg3200.txt +scrivener pg100.txt +scrofulous pg3200.txt +scroll pg3200.txt +scroll, pg100.txt +scroll. pg100.txt +scroll." pg3200.txt +scroll.] pg100.txt +scroll] pg100.txt +scrolls, pg3200.txt +scroop pg100.txt +scroop, pg100.txt +scroop. pg100.txt +scrouging pg3200.txt +scrowl. pg100.txt +scrub pg3200.txt +scrubbed pg3200.txt +scrubbing; pg3200.txt +scruff pg3200.txt +scrunch pg3200.txt +scruple pg31100.txt, pg100.txt +scruple, pg100.txt +scruple. pg100.txt +scruples pg31100.txt, pg3200.txt +scruples. pg31100.txt +scruples; pg31100.txt +scrupling pg3200.txt +scrupulous pg31100.txt +scrutinized pg3200.txt +scrutinizing pg3200.txt +scrutiny. pg3200.txt +scudamore pg31100.txt +scudamore, pg31100.txt +scudamore." pg31100.txt +scuffle pg3200.txt +scuffle, pg3200.txt +scuffling pg3200.txt +scuffling, pg100.txt +scufflings pg3200.txt +scull pg31100.txt +scullion! pg100.txt +sculls pg100.txt +sculptor pg3200.txt +sculptor. pg3200.txt +sculptor." 
pg3200.txt +sculpture pg3200.txt +sculpture; pg3200.txt +sculptured pg3200.txt +sculptures pg3200.txt +scum pg3200.txt +scum, pg3200.txt +scum. pg100.txt +scurried pg3200.txt +scurrility. pg3200.txt, pg100.txt +scurrying pg3200.txt +scurvy pg3200.txt +scuttle pg3200.txt +scuttles pg3200.txt +scythian, pg100.txt +se'ennight pg31100.txt +se'f.' pg3200.txt +se'night pg31100.txt +se'nnight." pg31100.txt +se, pg100.txt +se--" pg3200.txt +sea pg31100.txt, pg3200.txt, pg100.txt +sea! pg3200.txt, pg100.txt +sea!" pg3200.txt +sea, pg31100.txt, pg3200.txt, pg100.txt +sea," pg3200.txt +sea- pg100.txt +sea--he pg3200.txt +sea-air. pg3200.txt +sea-beautiful pg3200.txt +sea-captains pg3200.txt +sea-change pg100.txt +sea-coast pg100.txt +sea-fight pg100.txt +sea-fight] pg100.txt +sea-front, pg3200.txt +sea-green pg3200.txt +sea-level pg3200.txt +sea-level, pg3200.txt +sea-level. pg3200.txt +sea-like. pg100.txt +sea-line pg3200.txt +sea-monster! pg100.txt +sea-serpent, pg3200.txt +sea-shells pg3200.txt +sea-side. pg100.txt +sea-side; pg100.txt +sea-sorrow. pg100.txt +sea-storm? pg100.txt +sea-wall. pg3200.txt +sea-walls. pg3200.txt +sea. pg31100.txt, pg3200.txt, pg100.txt +sea." pg31100.txt, pg3200.txt +sea: pg3200.txt +sea; pg31100.txt, pg3200.txt, pg100.txt +sea? pg100.txt +sea?" pg3200.txt +seal pg3200.txt, pg100.txt +seal!" pg3200.txt +seal'd pg100.txt +seal'd, pg100.txt +seal'd? pg100.txt +seal, pg3200.txt, pg100.txt +seal--fetch pg3200.txt +seal-ring, pg3200.txt +seal. pg31100.txt, pg3200.txt, pg100.txt +seal." pg3200.txt +seal; pg100.txt +sealed pg3200.txt +sealed!" pg3200.txt +sealed; pg3200.txt +sealing pg100.txt +seals pg100.txt +seals. pg3200.txt +seam pg100.txt +seam. pg3200.txt +seaman pg3200.txt +seamanship pg3200.txt +seamed pg3200.txt +seamen pg3200.txt +seams pg3200.txt +seamstresses pg3200.txt +seance, pg3200.txt +seaport pg3200.txt +sear pg3200.txt, pg100.txt +search pg31100.txt, pg3200.txt, pg100.txt +search! pg3200.txt +search, pg3200.txt, pg100.txt +search. pg3200.txt, pg100.txt +searched pg3200.txt +searched." pg3200.txt +searched.' pg3200.txt +searchers pg3200.txt +searches pg3200.txt, pg100.txt +searching pg31100.txt, pg3200.txt +searching-expeditions pg3200.txt +searching. pg3200.txt +searching." pg3200.txt +searchingly pg3200.txt +seas pg3200.txt, pg100.txt +seas! pg31100.txt, pg3200.txt +seas, pg3200.txt, pg100.txt +seas--at pg3200.txt +seas. pg3200.txt, pg100.txt +seas; pg3200.txt, pg100.txt +seas? pg3200.txt, pg100.txt +seashore pg3200.txt +seashore, pg3200.txt +seasickness pg3200.txt +seasickness. pg3200.txt +seaside, pg31100.txt +season pg31100.txt, pg3200.txt, pg100.txt +season! pg3200.txt +season'd pg100.txt +season's pg3200.txt +season). pg3200.txt +season, pg31100.txt, pg3200.txt, pg100.txt +season," pg3200.txt +season--gang pg3200.txt +season. pg3200.txt, pg100.txt +season." pg31100.txt, pg3200.txt +season.' pg3200.txt +season; pg3200.txt, pg100.txt +seasonable; pg31100.txt +seasoned pg3200.txt +seasoning pg3200.txt +seasons pg3200.txt +seasons. pg3200.txt +seat pg31100.txt, pg3200.txt, pg100.txt +seat! pg100.txt +seat, pg31100.txt, pg3200.txt, pg100.txt +seat--desolation--the pg3200.txt +seat. pg3200.txt, pg100.txt +seat." pg3200.txt +seat: pg3200.txt +seat; pg3200.txt +seat? pg3200.txt +seated pg31100.txt, pg3200.txt +seated, pg3200.txt +seated. pg31100.txt, pg100.txt +seated; pg31100.txt +seating pg3200.txt +seats pg31100.txt, pg3200.txt +seats, pg31100.txt, pg3200.txt, pg100.txt +seats. 
pg3200.txt +seats; pg3200.txt +seaweed--is pg3200.txt +sebastian pg3200.txt, pg100.txt +sebastian, pg100.txt +sebastian- pg100.txt +sebastian. pg3200.txt +sebastian? pg100.txt +sebastian] pg100.txt +sebastians, pg3200.txt +sebastopol pg3200.txt +sebastopol, pg3200.txt +secession pg3200.txt +secessionist. pg3200.txt +sech pg3200.txt +secluded pg3200.txt +secluded, pg3200.txt +seclusion pg3200.txt +seclusion, pg31100.txt +seclusion. pg31100.txt, pg3200.txt +seclusion?--i pg31100.txt +second pg31100.txt, pg3200.txt, pg100.txt +second!" pg3200.txt +second, pg31100.txt, pg3200.txt, pg100.txt +second--we'll pg3200.txt +second-class pg3200.txt +second-hand pg3200.txt +second-hand, pg3200.txt +second-hand. pg3200.txt +second-rate pg31100.txt +second. pg31100.txt, pg3200.txt +second." pg31100.txt, pg3200.txt +second: pg3200.txt +second; pg3200.txt +secondary pg31100.txt, pg3200.txt +secondary. pg100.txt +seconding pg31100.txt +secondly, pg31100.txt, pg3200.txt +seconds pg3200.txt, pg100.txt +seconds--ten pg3200.txt +seconds. pg3200.txt +seconds; pg100.txt +secrecy pg31100.txt, pg3200.txt, pg100.txt +secrecy, pg31100.txt, pg3200.txt, pg100.txt +secrecy--mind, pg3200.txt +secrecy. pg100.txt +secrecy." pg3200.txt +secrecy; pg31100.txt, pg100.txt +secresy pg31100.txt +secret pg31100.txt, pg3200.txt, pg100.txt +secret!" pg31100.txt +secret, pg31100.txt, pg3200.txt, pg100.txt +secret-- pg31100.txt +secret--everybody pg3200.txt +secret. pg31100.txt, pg3200.txt, pg100.txt +secret." pg31100.txt, pg3200.txt +secret; pg31100.txt, pg3200.txt +secret? pg100.txt +secret?" pg3200.txt +secret?' pg3200.txt +secretaries pg100.txt +secretary pg3200.txt, pg100.txt +secretary's pg3200.txt +secretary) pg3200.txt +secretary, pg3200.txt, pg100.txt +secretary. pg3200.txt +secretary: pg3200.txt +secretary; pg100.txt +secretaryship pg3200.txt +secrete pg3200.txt +secreted pg3200.txt +secreted. pg31100.txt +secretly pg31100.txt, pg3200.txt +secretly, pg3200.txt +secretly. pg3200.txt, pg100.txt +secrets pg31100.txt, pg3200.txt, pg100.txt +secrets! pg100.txt +secrets, pg3200.txt, pg100.txt +secrets--cause pg3200.txt +secrets. pg100.txt +secrets? pg3200.txt, pg100.txt +sect pg3200.txt +sectary; pg100.txt +section pg31100.txt, pg3200.txt +section, pg3200.txt +section. pg3200.txt +section?" pg3200.txt +sections pg3200.txt +sections, pg3200.txt +sects pg3200.txt +secty. pg3200.txt +secularists, pg3200.txt +secure pg31100.txt, pg3200.txt, pg100.txt +secure, pg31100.txt, pg3200.txt, pg100.txt +secure--just pg3200.txt +secure. pg31100.txt, pg3200.txt, pg100.txt +secure." pg31100.txt +secure; pg100.txt +secured pg31100.txt, pg3200.txt +secured, pg3200.txt +secured. pg3200.txt +secured; pg3200.txt +securely pg31100.txt, pg3200.txt, pg100.txt +secures pg31100.txt, pg3200.txt +securing pg31100.txt, pg3200.txt +security pg3200.txt, pg100.txt +security! pg100.txt +security, pg31100.txt, pg3200.txt, pg100.txt +security. pg3200.txt, pg100.txt +sedan pg3200.txt +sedate pg31100.txt, pg3200.txt +sedge pg100.txt +sedgemoor, pg3200.txt +sediment pg3200.txt +sediment. pg3200.txt +sediments, pg3200.txt +sedition pg100.txt +sedition, pg100.txt +seditious pg3200.txt +seduc'd pg100.txt +seduce pg3200.txt +seduce, pg100.txt +seduced pg31100.txt, pg3200.txt +seduced? pg100.txt +seducer pg3200.txt +seducing pg3200.txt +seduction. pg3200.txt +seductions pg3200.txt +seductive pg3200.txt +seductive, pg3200.txt +see! pg3200.txt, pg100.txt +see!" pg3200.txt +see!' pg3200.txt +see't, pg100.txt +see't. pg100.txt +see't; pg100.txt +see't? 
pg100.txt +see, pg31100.txt, pg3200.txt, pg100.txt +see,--er--mizzes--" pg3200.txt +see- pg100.txt +see-- pg3200.txt +see--" pg3200.txt +see--"all pg3200.txt +see--' pg3200.txt +see----" pg3200.txt +see--friday--friday. pg3200.txt +see--one pg31100.txt +see--there pg3200.txt +see--what's pg3200.txt +see. pg31100.txt, pg3200.txt, pg100.txt +see." pg31100.txt, pg3200.txt +see.' pg3200.txt +see: pg3200.txt, pg100.txt +see; pg3200.txt, pg100.txt +see? pg3200.txt, pg100.txt +see?" pg3200.txt +see?", pg3200.txt +see?- pg100.txt +seeable. pg3200.txt +seed pg3200.txt +seed, pg3200.txt +seed--" pg3200.txt +seed. pg3200.txt +seed; pg3200.txt, pg100.txt +seed? pg3200.txt +seeds pg3200.txt, pg100.txt +seeds, pg3200.txt, pg100.txt +seeds. pg100.txt +seedsman pg100.txt +seedy, pg3200.txt +seegars, pg3200.txt +seegwun. pg3200.txt +seeing pg31100.txt, pg3200.txt, pg100.txt +seeing, pg31100.txt, pg3200.txt, pg100.txt +seeing. pg3200.txt, pg100.txt +seeing; pg100.txt +seek pg31100.txt, pg3200.txt, pg100.txt +seek'st. pg100.txt +seek, pg31100.txt, pg100.txt +seek. pg100.txt +seek; pg31100.txt, pg100.txt +seek? pg100.txt +seeker pg3200.txt +seeking pg31100.txt, pg3200.txt +seeking? pg100.txt +seekings pg3200.txt +seeks pg3200.txt, pg100.txt +seeks.' pg100.txt +seely; pg100.txt +seem pg31100.txt, pg3200.txt, pg100.txt +seem'd pg100.txt +seem'st- pg100.txt +seem'st. pg100.txt +seem, pg100.txt +seem--" pg3200.txt +seem. pg100.txt +seem." pg3200.txt +seem; pg100.txt +seemed pg31100.txt, pg3200.txt, pg100.txt +seemed, pg31100.txt +seemed: pg3200.txt +seemeth pg3200.txt +seemeth--" pg3200.txt +seeming pg31100.txt, pg3200.txt, pg100.txt +seeming! pg100.txt +seeming, pg100.txt +seeming. pg3200.txt, pg100.txt +seemingly pg31100.txt, pg3200.txt +seemly pg3200.txt +seems pg31100.txt, pg3200.txt, pg100.txt +seems!--and pg3200.txt +seems, pg31100.txt, pg3200.txt, pg100.txt +seems-- pg3200.txt +seems--i pg3200.txt +seems. pg3200.txt, pg100.txt +seems; pg100.txt +seen pg31100.txt, pg3200.txt, pg100.txt +seen! pg3200.txt, pg100.txt +seen!" pg31100.txt +seen't! pg100.txt +seen), pg31100.txt +seen, pg31100.txt, pg3200.txt, pg100.txt +seen--aggregating pg3200.txt +seen--but pg3200.txt +seen--praying pg3200.txt +seen. pg31100.txt, pg3200.txt, pg100.txt +seen." pg31100.txt, pg3200.txt +seen: pg3200.txt +seen; pg31100.txt, pg3200.txt, pg100.txt +seen? pg3200.txt, pg100.txt +seen?" pg3200.txt +seer pg3200.txt +sees pg31100.txt, pg3200.txt, pg100.txt +sees, pg3200.txt +sees--money, pg3200.txt +sees. pg100.txt +seest pg100.txt +seeth pg3200.txt +seethes. pg100.txt +seething pg3200.txt +segregated pg3200.txt +sehen. pg3200.txt +sehen? pg3200.txt +sehr pg3200.txt +sehr, pg3200.txt +sehr. pg3200.txt +seht pg3200.txt +sei. pg3200.txt +seide pg3200.txt +seigneur. pg100.txt +sein, pg3200.txt +sein," pg3200.txt +seine pg3200.txt +seine, pg3200.txt +seiz'd pg100.txt +seize pg31100.txt, pg3200.txt, pg100.txt +seize. pg31100.txt +seized pg31100.txt, pg3200.txt +seizes pg3200.txt +seizing pg3200.txt +seizing, pg31100.txt +seizure pg100.txt +seizure, pg31100.txt +sejested pg3200.txt +selby pg3200.txt +selby's." pg3200.txt +selby, pg3200.txt +selby. pg3200.txt +selby?" pg3200.txt +seldom pg31100.txt, pg3200.txt, pg100.txt +seldom. pg100.txt +seldom." pg3200.txt +seldomest pg3200.txt +select pg31100.txt, pg3200.txt +select, pg3200.txt +selected pg3200.txt +selected, pg3200.txt +selectest pg3200.txt +selecting pg3200.txt +selection, pg3200.txt +selection. pg31100.txt, pg3200.txt +selections pg3200.txt +selections. 
pg3200.txt +selections; pg3200.txt +seleucus pg100.txt +seleucus, pg100.txt +seleucus. pg100.txt +seleucus? pg100.txt +self pg31100.txt, pg3200.txt, pg100.txt +self! pg3200.txt, pg100.txt +self!" pg3200.txt +self, pg31100.txt, pg3200.txt, pg100.txt +self- pg100.txt +self--" pg3200.txt +self--and pg3200.txt +self--can't pg3200.txt +self-abuse pg100.txt +self-acquittal. pg3200.txt +self-admission. pg100.txt +self-affairs, pg100.txt +self-affected. pg100.txt +self-amusement pg31100.txt +self-applied pg100.txt +self-appointed pg3200.txt +self-appreciation; pg3200.txt +self-approbation pg3200.txt +self-approval pg3200.txt +self-approval. pg3200.txt +self-command pg31100.txt +self-command, pg31100.txt +self-command. pg31100.txt +self-communing pg3200.txt +self-communion, pg3200.txt +self-comparisons, pg100.txt +self-complacency pg3200.txt +self-complacency. pg3200.txt +self-conceited, pg3200.txt +self-condemnation pg31100.txt +self-consciousness pg3200.txt +self-consciousness. pg3200.txt +self-consequence, pg31100.txt +self-consequence. pg31100.txt +self-contained pg3200.txt +self-contentment, pg31100.txt, pg3200.txt +self-control. pg3200.txt +self-control; pg31100.txt +self-deception." pg3200.txt +self-defense pg3200.txt +self-deification pg3200.txt +self-denial pg31100.txt +self-denial. pg3200.txt +self-denials pg31100.txt +self-denying pg31100.txt +self-destruction. pg31100.txt +self-educated. pg3200.txt +self-endeared. pg100.txt +self-evident pg31100.txt +self-examination pg31100.txt +self-exposure pg3200.txt +self-government"; pg3200.txt +self-granted pg3200.txt +self-gratulation; pg31100.txt +self-importance pg31100.txt +self-imposed. pg31100.txt +self-indulgence; pg31100.txt +self-interest pg31100.txt, pg3200.txt +self-interest's pg3200.txt +self-interest, pg31100.txt +self-interest. pg3200.txt +self-introduction: pg3200.txt +self-killed: pg100.txt +self-loving- pg100.txt +self-misus'd. pg100.txt +self-neglecting. pg100.txt +self-occupied. pg31100.txt +self-pasting pg3200.txt +self-possessed pg3200.txt +self-prejudice pg3200.txt +self-preservation pg3200.txt +self-protections pg3200.txt +self-provocation, pg31100.txt +self-respect pg3200.txt +self-respect. pg3200.txt +self-sacrifice pg3200.txt +self-sacrifice, pg3200.txt +self-sacrifice; pg3200.txt +self-sacrifice? pg3200.txt +self-sacrifices, pg3200.txt +self-sacrificingly pg3200.txt +self-same pg3200.txt +self-satisfied pg3200.txt +self-seeking pg3200.txt +self-seeking, pg3200.txt +self-seeking. pg3200.txt +self-slaughter pg100.txt +self-sovereignty pg100.txt +self-subdu'd; pg100.txt +self-sufficiency pg31100.txt +self-threatenings pg31100.txt +self-willed, pg31100.txt +self-wrong, pg100.txt +self. pg31100.txt, pg3200.txt, pg100.txt +self." pg31100.txt, pg3200.txt +self; pg100.txt +self? pg100.txt +selfish pg31100.txt, pg3200.txt +selfish, pg3200.txt +selfish-- pg3200.txt +selfish--read pg31100.txt +selfish. pg31100.txt +selfish? pg31100.txt +selfish?" pg31100.txt +selfishness pg31100.txt, pg3200.txt +selfishness, pg31100.txt +selfishness. pg31100.txt +selfishness." pg31100.txt +selfishness; pg3200.txt +selkirk. pg3200.txt +sell pg31100.txt, pg3200.txt, pg100.txt +sell! pg3200.txt +sell!" pg3200.txt +sell, pg3200.txt +sell--expense pg3200.txt +sell. pg3200.txt, pg100.txt +sell; pg3200.txt +sell? pg3200.txt +seller pg3200.txt +sellers pg3200.txt +sellers! pg3200.txt +sellers"-- pg3200.txt +sellers's pg3200.txt +sellers). 
pg3200.txt +sellers, pg3200.txt +sellers,' pg3200.txt +sellers--no pg3200.txt +sellers--simon pg3200.txt +sellers--the pg3200.txt +sellers. pg3200.txt +sellers." pg3200.txt +sellers.']} pg3200.txt +sellers; pg3200.txt +sellers?" pg3200.txt +sellerses. pg3200.txt +selling pg3200.txt, pg100.txt +selling. pg3200.txt +sells pg3200.txt +selves pg31100.txt +selves! pg100.txt +selves. pg100.txt +selves." pg31100.txt +selves: pg3200.txt +selves; pg100.txt +semblance pg3200.txt, pg100.txt +semblance- pg100.txt +semblances. pg100.txt +semi- pg3200.txt +semi-annually. pg3200.txt +semi-starvation.] pg3200.txt +semi-suppression. pg3200.txt +semicircle pg100.txt +seminaries." pg3200.txt +seminary, pg31100.txt, pg3200.txt +seminary. pg3200.txt +seminary." pg3200.txt +semiramis. pg100.txt +sempronius pg100.txt +sempronius. pg100.txt +senate pg3200.txt, pg100.txt +senate! pg100.txt +senate, pg100.txt +senate,'--then." pg3200.txt +senate. pg3200.txt, pg100.txt +senate." pg3200.txt +senate; pg100.txt +senator pg3200.txt, pg100.txt +senator's pg3200.txt +senator, pg3200.txt +senator-cullom--wants pg3200.txt +senator. pg3200.txt, pg100.txt +senator.' pg3200.txt +senatorial pg3200.txt +senators pg3200.txt, pg100.txt +senators, pg100.txt +senators--objected pg3200.txt +senators. pg100.txt +senators; pg100.txt +senatorship." pg3200.txt +sence pg3200.txt +sence, pg3200.txt +sence--" pg3200.txt +send pg31100.txt, pg3200.txt, pg100.txt +send, pg3200.txt, pg100.txt +send- pg3200.txt +send. pg31100.txt, pg3200.txt, pg100.txt +send." pg3200.txt +sender!" pg3200.txt +sender; pg100.txt +sending pg31100.txt, pg3200.txt, pg100.txt +sending--" pg3200.txt +sending. pg100.txt +sends pg3200.txt, pg100.txt +sends, pg100.txt +seneschal, pg3200.txt +seneskal pg3200.txt +senior pg3200.txt +senior. pg3200.txt +seniory, pg100.txt +senis.' pg100.txt +senlis, pg3200.txt +sennet. pg100.txt +sens pg3200.txt +sensation pg31100.txt, pg3200.txt +sensation! pg3200.txt +sensation, pg3200.txt +sensation. pg3200.txt +sensation; pg31100.txt +sensations pg31100.txt, pg3200.txt +sensations, pg31100.txt +sensations-- pg31100.txt +sensations. pg31100.txt, pg3200.txt +sensations; pg31100.txt +sensations? pg31100.txt +sense pg31100.txt, pg3200.txt, pg100.txt +sense! pg100.txt +sense, pg31100.txt, pg3200.txt, pg100.txt +sense- pg100.txt +sense. pg31100.txt, pg3200.txt, pg100.txt +sense." pg3200.txt +sense: pg100.txt +sense; pg31100.txt, pg3200.txt, pg100.txt +sense? pg3200.txt, pg100.txt +sense?" pg3200.txt +senseless pg3200.txt +senseless. pg3200.txt, pg100.txt +senses pg31100.txt, pg3200.txt, pg100.txt +senses, pg31100.txt, pg3200.txt, pg100.txt +senses. pg3200.txt, pg100.txt +senses; pg31100.txt +sensibilities pg31100.txt, pg3200.txt +sensibilities--. pg31100.txt +sensibility pg31100.txt, pg3200.txt +sensibility. pg31100.txt +sensibility; pg31100.txt +sensibility_ pg31100.txt +sensible pg31100.txt, pg3200.txt, pg100.txt +sensible, pg31100.txt, pg3200.txt +sensible. pg31100.txt +sensibly pg31100.txt, pg3200.txt +sensibly, pg31100.txt +sensibly. pg31100.txt, pg100.txt +sensitive pg31100.txt, pg3200.txt +sensualist" pg3200.txt +sensualist." pg3200.txt +sensuality. pg100.txt +sent pg31100.txt, pg3200.txt, pg100.txt +sent, pg3200.txt, pg100.txt +sent- pg100.txt +sent. pg31100.txt, pg3200.txt, pg100.txt +sent; pg31100.txt, pg100.txt +sent? pg100.txt +sent?" pg3200.txt +sentence pg31100.txt, pg3200.txt, pg100.txt +sentence!" pg3200.txt +sentence, pg31100.txt, pg3200.txt, pg100.txt +sentence. 
pg31100.txt, pg3200.txt, pg100.txt +sentence: pg3200.txt +sentence:--"we pg31100.txt +sentence; pg3200.txt +sentence? pg3200.txt, pg100.txt +sentences pg31100.txt, pg3200.txt +sentences, pg3200.txt, pg100.txt +sentences-- pg3200.txt +sentences. pg31100.txt, pg100.txt +sentences; pg100.txt +sententious pg3200.txt +sententious. pg100.txt +sentient pg3200.txt +sentiment pg31100.txt, pg3200.txt +sentiment, pg31100.txt, pg3200.txt +sentiment," pg3200.txt +sentiment--it pg3200.txt +sentiment. pg31100.txt, pg3200.txt +sentiment." pg3200.txt +sentiment: pg3200.txt +sentimental pg31100.txt, pg3200.txt +sentimental. pg3200.txt +sentimentalities pg3200.txt +sentimentality pg3200.txt +sentimentality. pg3200.txt +sentiments pg31100.txt +sentiments!" pg31100.txt +sentiments, pg31100.txt, pg3200.txt +sentiments. pg31100.txt +sentiments." pg31100.txt, pg3200.txt +sentinel pg3200.txt +sentinel, pg3200.txt +sentinels pg100.txt +sentinels, pg3200.txt +sentinels--and pg3200.txt +sentinels. pg100.txt +sentries pg3200.txt +sentries. pg3200.txt +sepa--" pg3200.txt +separate pg31100.txt, pg3200.txt +separate, pg3200.txt +separate.' pg3200.txt +separated pg31100.txt, pg3200.txt +separated--" pg31100.txt +separated. pg3200.txt, pg100.txt +separately, pg3200.txt +separates pg3200.txt +separates. pg100.txt +separating pg31100.txt, pg3200.txt +separation pg31100.txt, pg3200.txt, pg100.txt +separation, pg31100.txt +separation. pg31100.txt, pg3200.txt +sepoys pg3200.txt +sepoys, pg3200.txt +seppi pg3200.txt +seppi, pg3200.txt +seppi?" pg3200.txt +sept. pg31100.txt +september pg3200.txt +september! pg3200.txt +september, pg31100.txt, pg3200.txt +september--something pg3200.txt +september--we pg3200.txt +september. pg3200.txt +september? pg31100.txt +septentrion. pg100.txt +sepulcher pg3200.txt +sepulchral pg3200.txt +sepulchre pg3200.txt, pg100.txt +sepulchre! pg3200.txt +sepulchre!' pg100.txt +sepulchre, pg3200.txt, pg100.txt +sepulchre--the pg3200.txt +sepulchre. pg3200.txt, pg100.txt +sepulchre." pg3200.txt +sepulchre? pg100.txt +sepulchres pg3200.txt +sepulchres; pg100.txt +sequel pg31100.txt, pg3200.txt +sequel. pg100.txt +sequel; pg100.txt +sequence pg3200.txt +sequence, pg3200.txt +sequent pg100.txt +sequest'ring pg100.txt +sequestration pg100.txt +serai pg3200.txt +sere, pg100.txt +serene pg3200.txt +serene, pg3200.txt +serene--she's pg3200.txt +serene. pg3200.txt +serenely pg3200.txt +serenis-sima- pg100.txt +serenities, pg3200.txt +serenity pg31100.txt, pg3200.txt +serenity, pg3200.txt +serenity. pg3200.txt +serenity.) pg3200.txt +serenity: pg3200.txt +serenity; pg3200.txt +sereny pg3200.txt +serfage. pg3200.txt +serfdom pg3200.txt +sergeant pg3200.txt, pg100.txt +sergeant, pg100.txt +sergeant-at-arms pg3200.txt, pg100.txt +sergeant-at-arms." pg3200.txt +sergeant. pg100.txt +sergeant] pg100.txt +serial pg3200.txt +serially pg3200.txt +serially. pg3200.txt +series pg31100.txt, pg3200.txt +series, pg3200.txt +series. pg3200.txt +serin), pg3200.txt +serious pg31100.txt, pg3200.txt, pg100.txt +serious, pg31100.txt, pg3200.txt +serious--nay, pg3200.txt +serious-minded?" pg3200.txt +serious. pg31100.txt, pg3200.txt +serious." pg31100.txt, pg3200.txt +serious; pg3200.txt +serious?' pg3200.txt +seriously pg31100.txt, pg3200.txt +seriously!" pg31100.txt +seriously) pg31100.txt +seriously, pg31100.txt, pg3200.txt +seriously-- pg31100.txt +seriously. pg31100.txt, pg3200.txt, pg100.txt +seriously; pg31100.txt +seriousness, pg3200.txt +serjeant-major pg3200.txt +serle pg31100.txt +sermon pg31100.txt, pg3200.txt +sermon! 
pg3200.txt +sermon, pg31100.txt +sermon-making pg31100.txt +sermon. pg3200.txt +sermon; pg3200.txt +sermonize pg3200.txt +sermons pg31100.txt, pg3200.txt +sermons, pg3200.txt +sermons--but pg3200.txt +sermons." pg31100.txt +sermons; pg31100.txt +sermons?" pg31100.txt +serpeant, pg3200.txt +serpent pg3200.txt +serpent, pg3200.txt +serpent--and pg3200.txt +serpent. pg3200.txt, pg100.txt +serpents pg3200.txt +serpents, pg100.txt +serpigo pg100.txt +seruyse, pg3200.txt +serv'd pg100.txt +serv'd. pg100.txt +serv'st. pg100.txt +servant pg31100.txt, pg3200.txt, pg100.txt +servant! pg100.txt +servant!'" pg3200.txt +servant's pg31100.txt +servant, pg31100.txt, pg3200.txt, pg100.txt +servant-girl?" pg3200.txt +servant-maid pg100.txt +servant-tipping pg3200.txt +servant. pg31100.txt, pg3200.txt, pg100.txt +servant." pg31100.txt +servant.] pg100.txt +servant: pg3200.txt +servant; pg3200.txt, pg100.txt +servant? pg100.txt +servants pg31100.txt, pg3200.txt, pg100.txt +servants! pg100.txt +servants' pg3200.txt +servants, pg31100.txt, pg3200.txt, pg100.txt +servants-- pg3200.txt +servants--witches pg3200.txt +servants. pg3200.txt, pg100.txt +servants." pg3200.txt +servants.' pg3200.txt +servants.] pg100.txt +servants: pg3200.txt +servants; pg100.txt +servants? pg3200.txt +servants?" pg3200.txt +servants]. pg100.txt +serve pg31100.txt, pg3200.txt, pg100.txt +serve! pg100.txt +serve, pg31100.txt, pg100.txt +serve- pg100.txt +serve. pg100.txt +serve: pg100.txt +serve; pg100.txt +serve? pg100.txt +served pg31100.txt, pg3200.txt, pg100.txt +served. pg3200.txt, pg100.txt +served; pg3200.txt +serves pg3200.txt, pg100.txt +serves, pg100.txt +serves. pg100.txt +service pg31100.txt, pg3200.txt, pg100.txt +service, pg31100.txt, pg3200.txt, pg100.txt +service--distinguished pg3200.txt +service--nobody pg3200.txt +service. pg3200.txt, pg100.txt +service." pg31100.txt, pg3200.txt +service: pg100.txt +service; pg31100.txt, pg3200.txt, pg100.txt +service? pg100.txt +service?" pg3200.txt +service?' pg3200.txt +serviceable pg31100.txt +serviceable, pg31100.txt +serviceable. pg3200.txt +serviceable? pg100.txt +services pg31100.txt, pg3200.txt, pg100.txt +services, pg3200.txt, pg100.txt +services--just pg3200.txt +services. pg100.txt +services." pg31100.txt, pg3200.txt +services; pg3200.txt, pg100.txt +servilities; pg3200.txt +servility pg3200.txt +servility. pg3200.txt +servility; pg100.txt +servilius pg100.txt +servilius! pg100.txt +servilius, pg100.txt +servilius. pg100.txt +servilius? pg100.txt +serving pg3200.txt +serving, pg100.txt +serving-creature. pg100.txt +serving-man pg100.txt +serving-man, pg100.txt +serving-men pg100.txt +servingman pg100.txt +servingman. pg100.txt +servingman.] pg100.txt +servingmen pg100.txt +servingmen, pg100.txt +serviteur. pg100.txt +servitor pg100.txt +servitor, pg100.txt +servitor. pg100.txt +servitors pg100.txt +servitors, pg100.txt +servitude, pg100.txt +sessa! pg100.txt +session pg3200.txt, pg100.txt +session, pg3200.txt +session. pg3200.txt, pg100.txt +session." pg3200.txt +sessions pg3200.txt +sessions, pg3200.txt +sestos. pg100.txt +set pg31100.txt, pg3200.txt, pg100.txt +set, pg31100.txt, pg3200.txt, pg100.txt +set. pg31100.txt, pg3200.txt, pg100.txt +set.] pg31100.txt +set; pg31100.txt, pg100.txt +set? pg100.txt +set?" pg3200.txt +setchell pg3200.txt +setebos, pg100.txt +sets pg31100.txt, pg3200.txt, pg100.txt +settee pg3200.txt +setting pg31100.txt, pg3200.txt, pg100.txt +setting-room. pg3200.txt +setting. pg3200.txt +setting." 
pg3200.txt +setting; pg3200.txt +settle pg31100.txt, pg3200.txt, pg100.txt +settle, pg31100.txt, pg100.txt +settle--but pg3200.txt +settle--no pg3200.txt +settle." pg3200.txt +settle?" pg3200.txt +settled pg31100.txt, pg3200.txt +settled, pg31100.txt, pg3200.txt, pg100.txt +settled--and pg3200.txt +settled. pg31100.txt, pg3200.txt +settled." pg31100.txt, pg3200.txt +settled; pg31100.txt +settled? pg31100.txt +settlement pg3200.txt +settlement's pg3200.txt +settlement, pg3200.txt +settlement. pg31100.txt, pg3200.txt +settlement; pg3200.txt +settlements pg3200.txt +settlements. pg3200.txt +settler pg3200.txt +settler-- pg3200.txt +settlers pg3200.txt +settles pg3200.txt +settling pg31100.txt, pg3200.txt +settling. pg100.txt +settlings pg3200.txt +seven pg31100.txt, pg3200.txt, pg100.txt +seven! pg31100.txt +seven, pg3200.txt, pg100.txt +seven- pg3200.txt +seven-- pg3200.txt +seven-eighths pg3200.txt +seven-league pg3200.txt +seven-tenths pg3200.txt +seven-up pg3200.txt +seven-up, pg3200.txt +seven-year-old pg3200.txt +seven. pg3200.txt, pg100.txt +seven." pg3200.txt +seventeen pg31100.txt, pg3200.txt +seventeen, pg31100.txt, pg3200.txt +seventeen. pg3200.txt +seventeen." pg3200.txt +seventeenth pg3200.txt +seventeenth," pg3200.txt +seventh pg3200.txt +seventh's pg3200.txt +seventh, pg3200.txt +seventieth pg3200.txt +seventy pg3200.txt +seventy- pg3200.txt +seventy-five pg3200.txt +seventy-five--" pg3200.txt +seventy-four pg3200.txt +seventy-mile pg3200.txt +seventy-one pg3200.txt +seventy-three pg3200.txt +seventy-two pg3200.txt +seventy. pg3200.txt +sever'd pg100.txt +several pg31100.txt, pg3200.txt +several, pg3200.txt +several. pg31100.txt +several." pg31100.txt +severally pg100.txt +severally. pg31100.txt, pg100.txt +severals pg100.txt +severe pg31100.txt, pg3200.txt +severe, pg3200.txt +severe. pg31100.txt, pg100.txt +severe." pg31100.txt +severe: pg3200.txt +severe; pg31100.txt, pg100.txt +severed pg3200.txt +severely pg31100.txt, pg3200.txt +severely-- pg3200.txt +severely. pg3200.txt +severely?' pg3200.txt +severing. pg100.txt +severity pg31100.txt, pg3200.txt +severity, pg100.txt +severity. pg31100.txt, pg100.txt +severity: pg3200.txt +severus; pg31100.txt +sew pg100.txt +sew.' pg100.txt +sewage. pg3200.txt +sewed pg3200.txt +sewed. pg3200.txt +sewer pg3200.txt +sewer! pg100.txt +sewer, pg3200.txt +sewer-gas pg3200.txt +sewer." pg3200.txt +sewer.' pg3200.txt +sewing pg3200.txt +sewing-machine pg3200.txt +sewing-work pg3200.txt +sews pg3200.txt +sex pg31100.txt, pg3200.txt, pg100.txt +sex's pg3200.txt +sex, pg31100.txt, pg3200.txt, pg100.txt +sex--were pg3200.txt +sex. pg3200.txt, pg100.txt +sex." pg31100.txt, pg3200.txt +sex; pg31100.txt, pg3200.txt +sex? pg100.txt +sex?" pg3200.txt +sexes pg3200.txt +sexes, pg3200.txt +sexes. pg3200.txt +sexes." pg31100.txt +sexless, pg3200.txt +sextant pg3200.txt +sexton's pg3200.txt +sexton. pg100.txt +seymour- pg100.txt +seyton! pg100.txt +seyton. pg100.txt +sh't!" pg3200.txt +sh-she--" pg3200.txt +sh-she. pg3200.txt +sha'n't pg3200.txt +sha'n't' pg3200.txt +sha'nt pg3200.txt +shabbiness pg3200.txt +shabby pg31100.txt, pg3200.txt +shabby, pg31100.txt +shackle pg100.txt +shackleby pg3200.txt +shackles pg31100.txt, pg3200.txt +shackles! pg100.txt +shad. pg3200.txt +shadders pg3200.txt +shade pg31100.txt, pg3200.txt, pg100.txt +shade, pg3200.txt, pg100.txt +shade. pg31100.txt, pg3200.txt, pg100.txt +shade." pg31100.txt, pg3200.txt +shade?" pg3200.txt +shaded pg3200.txt +shades pg31100.txt, pg3200.txt +shades. 
pg3200.txt +shadow pg31100.txt, pg3200.txt, pg100.txt +shadow! pg3200.txt, pg100.txt +shadow, pg3200.txt +shadow. pg3200.txt, pg100.txt +shadow." pg3200.txt +shadow; pg3200.txt, pg100.txt +shadow? pg100.txt +shadowed. pg3200.txt +shadows pg3200.txt, pg100.txt +shadows, pg3200.txt, pg100.txt +shadows--grew pg3200.txt +shadows. pg3200.txt, pg100.txt +shadows; pg3200.txt +shadowy pg3200.txt +shadwells pg3200.txt +shady pg3200.txt +shady, pg3200.txt +shady. pg3200.txt +shaft pg3200.txt, pg100.txt +shaft, pg3200.txt, pg100.txt +shaft. pg3200.txt, pg100.txt +shaft; pg3200.txt +shafts pg3200.txt +shafts, pg3200.txt +shafts. pg3200.txt +shah pg3200.txt +shak'd, pg100.txt +shake pg31100.txt, pg3200.txt, pg100.txt +shake, pg31100.txt +shake. pg3200.txt, pg100.txt +shake." pg3200.txt +shake; pg100.txt +shaken pg31100.txt, pg3200.txt, pg100.txt +shaken. pg3200.txt +shaken; pg100.txt +shaker pg3200.txt +shaker, pg3200.txt +shakers, pg3200.txt +shakes pg100.txt +shakes. pg3200.txt +shakespear's pg31100.txt +shakespeare pg31100.txt, pg3200.txt, pg100.txt +shakespeare! pg3200.txt +shakespeare's pg3200.txt +shakespeare's! pg3200.txt +shakespeare's. pg3200.txt +shakespeare, pg3200.txt, pg100.txt +shakespeare--if pg3200.txt +shakespeare. pg3200.txt +shakespeare." pg3200.txt +shakespeare; pg3200.txt +shakespearean pg3200.txt +shakespearean." pg3200.txt +shakespeares pg3200.txt +shakespearites pg3200.txt +shakespearites. pg3200.txt +shaking pg31100.txt, pg3200.txt +shaking. pg3200.txt +shaky pg3200.txt +shaky. pg3200.txt +shall!' pg3200.txt +shall, pg31100.txt, pg100.txt +shall- pg100.txt +shall--" pg3200.txt +shall--bitte, pg3200.txt +shall. pg31100.txt, pg3200.txt, pg100.txt +shall." pg31100.txt +shall: pg3200.txt +shall; pg100.txt +shall?" pg31100.txt +shallop, pg3200.txt +shallow pg3200.txt, pg100.txt +shallow! pg100.txt +shallow, pg3200.txt, pg100.txt +shallow- pg100.txt +shallow-rooted; pg100.txt +shallow. pg3200.txt, pg100.txt +shallow; pg100.txt +shallow? pg100.txt +shalt pg3200.txt, pg100.txt +shalt. pg100.txt +sham pg3200.txt +sham'd pg100.txt +sham'd. pg100.txt +sham'd; pg100.txt +sham's pg3200.txt +sham, pg3200.txt +sham- pg3200.txt +sham. pg3200.txt +shambles, pg100.txt +shambling pg3200.txt +shame pg31100.txt, pg3200.txt, pg100.txt +shame! pg3200.txt, pg100.txt +shame!" pg3200.txt +shame, pg3200.txt, pg100.txt +shame- pg100.txt +shame--" pg3200.txt +shame--and pg3200.txt +shame. pg3200.txt, pg100.txt +shame." pg31100.txt, pg3200.txt +shame: pg3200.txt, pg100.txt +shame; pg31100.txt, pg3200.txt, pg100.txt +shame? pg100.txt +shame?" pg3200.txt +shamed pg3200.txt +shamed! pg100.txt +shamefaced pg3200.txt +shameful pg31100.txt, pg3200.txt +shamefully pg3200.txt +shamefully, pg100.txt +shamefully. pg3200.txt +shameless pg3200.txt +shameless. pg100.txt +shames pg3200.txt, pg100.txt +shames, pg100.txt +shames. pg100.txt +shames? pg100.txt +shaming pg100.txt +shamrock. pg3200.txt +shams pg3200.txt +shams. pg3200.txt +shan't pg3200.txt +shanties. pg3200.txt +shanty pg3200.txt +shanty, pg3200.txt +shap'd pg100.txt +shape pg31100.txt, pg3200.txt, pg100.txt +shape! pg100.txt +shape!" pg3200.txt +shape!- pg100.txt +shape, pg3200.txt, pg100.txt +shape. pg3200.txt, pg100.txt +shape.... pg3200.txt +shape: pg3200.txt +shape; pg3200.txt, pg100.txt +shape?" pg3200.txt +shaped pg31100.txt, pg3200.txt +shaped. pg3200.txt +shapeless pg31100.txt +shapely pg3200.txt +shapes pg3200.txt, pg100.txt +shapes, pg3200.txt, pg100.txt +shapes. 
pg3200.txt +shapes; pg100.txt +shaping pg31100.txt +shar'd, pg100.txt +share pg31100.txt, pg3200.txt, pg100.txt +share, pg3200.txt +share. pg31100.txt, pg3200.txt, pg100.txt +share." pg31100.txt, pg3200.txt +share; pg31100.txt +share?" pg3200.txt +shared pg31100.txt, pg3200.txt, pg100.txt +shareholders pg3200.txt +shares pg31100.txt, pg3200.txt, pg100.txt +shares! pg3200.txt +shares--out pg3200.txt +shares. pg3200.txt, pg100.txt +sharing pg31100.txt, pg3200.txt +shark pg31100.txt, pg3200.txt +shark, pg3200.txt, pg100.txt +shark--desire pg3200.txt +shark-skin. pg3200.txt +shark." pg3200.txt +shark?" pg3200.txt +sharks pg3200.txt +sharks, pg3200.txt +sharks. pg3200.txt +sharon pg3200.txt +sharon. pg3200.txt +sharp pg31100.txt, pg3200.txt, pg100.txt +sharp, pg31100.txt, pg3200.txt +sharp-set, pg3200.txt +sharp. pg3200.txt, pg100.txt +sharp." pg3200.txt +sharper pg3200.txt +sharper." pg3200.txt +sharpers, pg3200.txt +sharping; pg3200.txt +sharply pg3200.txt +sharply, pg3200.txt, pg100.txt +sharply-- pg3200.txt +sharply. pg3200.txt, pg100.txt +sharply: pg3200.txt +sharpness pg3200.txt +sharpness. pg100.txt +sharps. pg100.txt +sharpshooters pg3200.txt +shave pg100.txt +shave, pg3200.txt +shave. pg3200.txt +shaved pg3200.txt +shaved, pg3200.txt +shaved--there, pg3200.txt +shaven pg3200.txt +shaven, pg3200.txt +shaw. pg100.txt +shawl pg31100.txt, pg3200.txt +shawl-- pg31100.txt +shawl-straps, pg3200.txt +shawl. pg31100.txt +shawls, pg3200.txt +shawnee pg3200.txt +shaxpur pg3200.txt +shaxpur. pg3200.txt +shaxpur? pg3200.txt +she! pg3200.txt +she'd pg3200.txt +she'd-- pg3200.txt +she'll pg3200.txt, pg100.txt +she's pg3200.txt, pg100.txt +she) pg31100.txt +she, pg31100.txt, pg3200.txt, pg100.txt +she,--" pg31100.txt +she,--' pg3200.txt +she- pg100.txt +she--" pg3200.txt +she--accordingly pg31100.txt +she-bear, pg100.txt +she-brew pg3200.txt +she-englishwoman." pg3200.txt +she-foxes! pg100.txt +she. pg31100.txt, pg3200.txt, pg100.txt +she." pg3200.txt +she.) pg31100.txt +she.--"i pg31100.txt +she.--"the pg31100.txt +she: pg3200.txt +she; pg31100.txt, pg3200.txt, pg100.txt +she? pg3200.txt, pg100.txt +she?" pg31100.txt, pg3200.txt +she?' pg3200.txt +sheaf pg3200.txt +sheaf, pg100.txt +shearing pg3200.txt +shearing. pg100.txt +sheath pg3200.txt, pg100.txt +sheath'd pg100.txt +sheath, pg3200.txt, pg100.txt +sheathe pg100.txt +sheathing pg3200.txt +sheathing; pg100.txt +sheaves pg100.txt +sheba pg3200.txt +sheba's pg3200.txt +shechem pg3200.txt +shechem, pg3200.txt +shed pg31100.txt, pg3200.txt, pg100.txt +shed, pg3200.txt, pg100.txt +shed. pg100.txt +shed; pg100.txt +shed? pg100.txt +shed?" pg3200.txt +shedding pg31100.txt +shedding! pg100.txt +sheds pg3200.txt, pg100.txt +sheds, pg3200.txt, pg100.txt +sheen pg3200.txt, pg100.txt +sheen, pg100.txt +sheep pg3200.txt, pg100.txt +sheep, pg31100.txt, pg3200.txt, pg100.txt +sheep- pg100.txt +sheep--" pg3200.txt +sheep-dip pg3200.txt +sheep-head, pg3200.txt +sheep-shearing pg100.txt +sheep-shearing, pg100.txt +sheep-shearing. pg100.txt +sheep-signal." pg3200.txt +sheep. pg3200.txt, pg100.txt +sheep." pg3200.txt +sheep; pg100.txt +sheep? pg100.txt +sheepish pg3200.txt +sheepishly pg3200.txt +sheepskin pg31100.txt +sheepskin. pg3200.txt +sheepskins? pg100.txt +sheer pg3200.txt +sheet pg3200.txt +sheet! pg3200.txt +sheet, pg3200.txt, pg100.txt +sheet-music. pg3200.txt +sheet-saying: pg3200.txt +sheet. pg3200.txt, pg100.txt +sheet: pg3200.txt +sheet; pg3200.txt, pg100.txt +sheet? pg31100.txt, pg100.txt +sheet?" 
pg3200.txt +sheets pg31100.txt, pg3200.txt, pg100.txt +sheets! pg100.txt +sheets, pg3200.txt, pg100.txt +sheets- pg100.txt +sheets. pg100.txt +sheets; pg31100.txt, pg100.txt +sheffield, pg100.txt +sheffield?" pg3200.txt +sheik pg3200.txt +shekels pg3200.txt +shekels, pg3200.txt +shekels." pg3200.txt +shekels? pg3200.txt +shekels?" pg3200.txt +shelby." pg3200.txt +shelf pg3200.txt +shell pg3200.txt +shell, pg3200.txt +shell. pg3200.txt, pg100.txt +shell? pg100.txt +shelled pg3200.txt +shellers. pg3200.txt +shelley pg3200.txt +shelley's pg3200.txt +shelley's--as pg3200.txt +shelley's. pg3200.txt +shelley, pg3200.txt +shelley. pg3200.txt +shelley." pg3200.txt +shelley: pg3200.txt +shelley; pg3200.txt +shelleys pg3200.txt +shells pg3200.txt +shells; pg3200.txt +shelt'red pg100.txt +shelter pg31100.txt, pg3200.txt +shelter! pg3200.txt +shelter, pg31100.txt, pg3200.txt, pg100.txt +shelter. pg3200.txt +shelter; pg100.txt +shelter?" pg3200.txt +sheltered pg3200.txt +sheltered, pg31100.txt +sheltering pg3200.txt +shelterless." pg3200.txt +shelved. pg3200.txt +shelved.' pg3200.txt +shelves pg100.txt +shelves. pg3200.txt +shensi-2-year pg3200.txt +shent pg100.txt +shent, pg100.txt +sheol pg3200.txt +shepannik. pg3200.txt +shepherd pg31100.txt, pg3200.txt, pg100.txt +shepherd! pg100.txt +shepherd's pg3200.txt, pg100.txt +shepherd's) pg31100.txt +shepherd, pg31100.txt, pg100.txt +shepherd--or pg3200.txt +shepherd. pg100.txt +shepherd; pg100.txt +shepherd? pg100.txt +shepherd?" pg31100.txt +shepherdes pg100.txt +shepherdess pg100.txt +shepherdess- pg100.txt +shepherdesses pg100.txt +shepherds, pg3200.txt, pg100.txt +shepherds--glimpses pg3200.txt +shepherds. pg100.txt +shepherdson pg3200.txt +shepherdson?" pg3200.txt +shepherdsons pg3200.txt +shepherdsons?" pg3200.txt +shepley pg3200.txt +sheppard; pg3200.txt +sherburn. pg3200.txt +sheridan pg3200.txt +sheridan, pg3200.txt +sheriff pg3200.txt, pg100.txt +sheriff!" pg3200.txt +sheriff's pg3200.txt +sheriff, pg31100.txt, pg3200.txt, pg100.txt +sheriff. pg3200.txt, pg100.txt +sheriff." pg3200.txt +sheriff: pg3200.txt +sheriff? pg100.txt +sheriff?" pg3200.txt +sheriffs pg3200.txt +sherman pg3200.txt +sherris, pg100.txt +sherris. pg100.txt +sherry pg3200.txt +sherry, pg3200.txt +shet pg3200.txt +shet, pg3200.txt +shevyott pg3200.txt +shew pg31100.txt +shewed pg31100.txt +sheweth." pg31100.txt +shewing pg31100.txt +shewn pg31100.txt +shewn, pg31100.txt +shews pg31100.txt +shibboleth. pg3200.txt +shied pg3200.txt +shield pg31100.txt, pg3200.txt, pg100.txt +shield, pg3200.txt, pg100.txt +shield--" pg3200.txt +shield. pg100.txt +shielded pg3200.txt, pg100.txt +shielded. pg100.txt +shields pg3200.txt +shields, pg3200.txt +shields. pg3200.txt +shies pg3200.txt +shift pg31100.txt, pg3200.txt, pg100.txt +shift! pg100.txt +shift, pg100.txt +shift. pg100.txt +shifted pg3200.txt +shifting pg31100.txt, pg3200.txt +shifting, pg3200.txt +shifts pg100.txt +shifts, pg100.txt +shifts. pg100.txt +shifty pg3200.txt +shillaber pg3200.txt +shilling pg31100.txt, pg3200.txt, pg100.txt +shilling--her pg31100.txt +shilling. pg100.txt +shilling] pg100.txt +shillings pg31100.txt, pg3200.txt +shillings. pg100.txt +shilly-shally; pg3200.txt +shimmery, pg3200.txt +shin pg3200.txt +shin, pg100.txt +shin. pg100.txt +shin? pg100.txt +shine pg31100.txt, pg3200.txt, pg100.txt +shine! pg100.txt +shine) pg100.txt +shine, pg100.txt +shine- pg100.txt +shine. pg100.txt +shine; pg3200.txt, pg100.txt +shine? 
pg100.txt +shines pg3200.txt, pg100.txt +shines, pg3200.txt, pg100.txt +shingle pg3200.txt +shingle-bundles. pg3200.txt +shingle-nails pg3200.txt +shingles pg3200.txt +shining pg3200.txt +shining, pg3200.txt +shining. pg3200.txt +shining; pg3200.txt +shinplasters pg3200.txt +shins pg3200.txt +shins, pg100.txt +shins. pg100.txt +shins; pg3200.txt +shintoists, pg3200.txt +shiny pg3200.txt +shiny, pg3200.txt +ship pg3200.txt, pg100.txt +ship's pg3200.txt +ship, pg3200.txt, pg100.txt +ship- pg100.txt +ship--solemnity, pg3200.txt +ship-captain pg3200.txt +ship-load pg3200.txt +ship-mates; pg3200.txt +ship-shape. pg3200.txt +ship. pg3200.txt, pg100.txt +ship." pg3200.txt +ship.' pg3200.txt +ship; pg3200.txt, pg100.txt +ship;' pg3200.txt +ship?" pg3200.txt +shipboard pg3200.txt, pg100.txt +shipboard? pg100.txt +shipman pg3200.txt +shipmate pg3200.txt +shipmate, pg3200.txt +shipmates; pg3200.txt +shipments pg3200.txt +shipp'd, pg100.txt +shipp'd. pg100.txt +shipp'd? pg100.txt +shipped pg31100.txt, pg3200.txt +shipped!' pg3200.txt +shipping pg3200.txt +shipping- pg100.txt +ships pg31100.txt, pg3200.txt, pg100.txt +ships, pg31100.txt, pg3200.txt, pg100.txt +ships--make pg3200.txt +ships. pg3200.txt, pg100.txt +ships.' pg3200.txt +ships; pg3200.txt +ships? pg100.txt +shipshape pg3200.txt +shipwreck pg3200.txt +shipwrecks. pg3200.txt +shipwright, pg100.txt +shire pg100.txt +shirk--for pg3200.txt +shirked pg3200.txt +shirking. pg3200.txt +shirley, pg31100.txt +shirley. pg31100.txt +shirt pg3200.txt +shirt!" pg3200.txt +shirt, pg3200.txt +shirt-front pg3200.txt +shirt. pg3200.txt, pg100.txt +shirt." pg3200.txt +shirt; pg3200.txt +shirt?" pg3200.txt +shirts pg3200.txt, pg100.txt +shirts, pg3200.txt +shirts. pg100.txt +shiva, pg3200.txt +shivaree pg3200.txt +shiver pg3200.txt +shiver, pg3200.txt +shiver. pg3200.txt +shiver." pg3200.txt +shivered pg3200.txt +shiverin' pg3200.txt +shivering pg31100.txt, pg3200.txt +shivering--thinking pg3200.txt +shivers pg3200.txt +shivers. pg100.txt +shiz pg3200.txt +sho!--and pg3200.txt +sho' pg3200.txt +sho'! pg3200.txt +sho'!" pg3200.txt +sho', pg3200.txt +sho'. pg3200.txt +sho'." pg3200.txt +shoal pg3200.txt +shoal, pg3200.txt +shoal. pg3200.txt +shoalest pg3200.txt +shoaling pg3200.txt +shoals pg3200.txt +shock pg31100.txt, pg3200.txt, pg100.txt +shock! pg3200.txt +shock, pg3200.txt +shock. pg3200.txt +shock; pg3200.txt +shocked pg31100.txt, pg3200.txt +shocked! pg31100.txt +shocked, pg3200.txt +shocked. pg3200.txt +shocked; pg31100.txt +shocking pg31100.txt, pg3200.txt +shocking!" pg31100.txt +shocking. pg3200.txt +shocking." pg31100.txt +shocks pg3200.txt, pg100.txt +shoddy, pg3200.txt +shoe pg3200.txt, pg100.txt +shoe, pg100.txt +shoe-leather. pg3200.txt +shoe. pg3200.txt, pg100.txt +shoe? pg100.txt +shoe] pg100.txt +shoeing pg100.txt +shoemaker pg3200.txt +shoemaker, pg3200.txt +shoemaker.' pg3200.txt +shoemakers, pg3200.txt +shoes pg3200.txt, pg100.txt +shoes, pg31100.txt, pg3200.txt, pg100.txt +shoes. pg3200.txt, pg100.txt +shoes." pg3200.txt +shoes; pg100.txt +shoes?' pg3200.txt +shone pg31100.txt, pg3200.txt +shook pg31100.txt, pg3200.txt, pg100.txt +shook, pg3200.txt +shook--she pg3200.txt +shook. pg100.txt +shoon, pg100.txt +shoon. pg100.txt +shoot pg31100.txt, pg3200.txt, pg100.txt +shoot!" pg3200.txt +shoot, pg100.txt +shoot. pg100.txt +shoot." pg31100.txt +shoot: pg100.txt +shoot?" pg3200.txt +shooter. pg100.txt +shooter? pg100.txt +shootie pg100.txt +shooting pg3200.txt +shooting. pg3200.txt, pg100.txt +shooting?" pg3200.txt +shootings. 
pg3200.txt +shoots pg3200.txt, pg100.txt +shop pg31100.txt, pg3200.txt, pg100.txt +shop, pg3200.txt, pg100.txt +shop--carpenters pg3200.txt +shop-boy pg31100.txt +shop-keepers pg3200.txt +shop-talk pg3200.txt +shop. pg3200.txt, pg100.txt +shop." pg31100.txt, pg3200.txt +shop; pg31100.txt +shop?" pg3200.txt +shopkeeper pg3200.txt +shopkeepers, pg3200.txt +shopmates. pg3200.txt +shopping, pg3200.txt +shops pg31100.txt, pg3200.txt +shops, pg3200.txt +shops. pg100.txt +shops; pg3200.txt +shopwoman pg3200.txt +shorbs. pg3200.txt +shore pg3200.txt, pg100.txt +shore! pg100.txt +shore'? pg100.txt +shore, pg31100.txt, pg3200.txt, pg100.txt +shore--" pg3200.txt +shore--they pg3200.txt +shore. pg3200.txt, pg100.txt +shore." pg3200.txt +shore.-- pg3200.txt +shore; pg3200.txt, pg100.txt +shore? pg100.txt +shoreless pg3200.txt +shoreless, pg3200.txt +shores pg3200.txt, pg100.txt +shores, pg3200.txt, pg100.txt +shores. pg100.txt +shores? pg100.txt +shorn pg3200.txt +short pg31100.txt, pg3200.txt, pg100.txt +short! pg100.txt +short, pg31100.txt, pg3200.txt, pg100.txt +short--all pg3200.txt +short--and pg31100.txt +short-handed pg3200.txt +short-lived. pg3200.txt +short-stop, pg3200.txt +short-tailed pg3200.txt +short-winded. pg100.txt +short. pg31100.txt, pg3200.txt, pg100.txt +short." pg31100.txt, pg3200.txt +short; pg31100.txt, pg100.txt +short? pg100.txt +short?" pg31100.txt +shortcomings. pg3200.txt +shorten pg3200.txt +shortened pg3200.txt +shortened. pg100.txt +shortening pg3200.txt +shorter pg31100.txt, pg3200.txt, pg100.txt +shorter, pg31100.txt, pg3200.txt +shorter. pg3200.txt +shorter; pg100.txt +shortest pg3200.txt +shorthand. pg3200.txt +shortly pg31100.txt, pg3200.txt, pg100.txt +shortly, pg31100.txt, pg3200.txt, pg100.txt +shortly. pg3200.txt, pg100.txt +shortly? pg100.txt +shortness pg31100.txt, pg100.txt +shorts, pg3200.txt +shot pg3200.txt, pg100.txt +shot) pg31100.txt +shot, pg31100.txt, pg3200.txt, pg100.txt +shot-bag pg3200.txt +shot-gun; pg3200.txt +shot-tower, pg3200.txt +shot. pg3200.txt, pg100.txt +shot." pg3200.txt +shot.' pg100.txt +shot; pg3200.txt, pg100.txt +shot?" pg3200.txt +shot?' pg3200.txt +shotgun pg3200.txt +shotgun, pg3200.txt +shotgun; pg3200.txt +shotguns; pg3200.txt +shots pg3200.txt +shots, pg3200.txt +shots. pg3200.txt +should, pg31100.txt, pg3200.txt, pg100.txt +should- pg100.txt +should. pg31100.txt, pg3200.txt, pg100.txt +should." pg31100.txt, pg3200.txt +should.' pg100.txt +should; pg3200.txt, pg100.txt +should?" pg31100.txt +shoulder pg3200.txt, pg100.txt +shoulder, pg3200.txt, pg100.txt +shoulder--" pg3200.txt +shoulder-joints, pg3200.txt +shoulder. pg3200.txt, pg100.txt +shoulder." pg3200.txt +shoulder; pg3200.txt +shoulder? pg3200.txt +shouldered pg3200.txt +shouldered.' pg3200.txt +shouldering pg3200.txt +shoulders pg31100.txt, pg3200.txt, pg100.txt +shoulders, pg3200.txt, pg100.txt +shoulders. pg3200.txt, pg100.txt +shoulders." pg31100.txt, pg3200.txt +shoulders; pg100.txt +shoulders? pg100.txt +shouldn't pg3200.txt +shouldn't, pg3200.txt +shouldn't. pg3200.txt +shouldst pg100.txt +shout pg3200.txt, pg100.txt +shout! pg100.txt +shout!--it pg3200.txt +shout, pg3200.txt, pg100.txt +shout-- pg3200.txt +shout. pg3200.txt, pg100.txt +shout: pg3200.txt +shout; pg3200.txt +shout] pg100.txt +shouted pg3200.txt +shouted, pg3200.txt +shouted-- pg3200.txt +shouted--laughing--think pg3200.txt +shouted. pg100.txt +shouted: pg3200.txt +shouted; pg3200.txt +shouting pg3200.txt +shouting, pg3200.txt +shouting-- pg3200.txt +shouting." 
pg3200.txt +shouting: pg3200.txt +shouting? pg3200.txt +shoutings pg3200.txt +shoutings, pg3200.txt +shouts pg3200.txt, pg100.txt +shouts. pg100.txt +shouts: pg3200.txt +shouts? pg100.txt +shouts] pg100.txt +shove pg3200.txt +shove' pg3200.txt +shoved pg3200.txt +shoved, pg3200.txt +shoved. pg3200.txt +shoved; pg3200.txt +shovel pg3200.txt +shovel, pg3200.txt +shovel-hatted, pg3200.txt +shovel. pg3200.txt +shoveled pg3200.txt +shovelful pg3200.txt +shovelfuls pg3200.txt +shoveling pg3200.txt +shovels pg3200.txt +shovels, pg3200.txt +shoves pg3200.txt +shoving pg3200.txt +show pg31100.txt, pg3200.txt, pg100.txt +show! pg100.txt +show'd pg100.txt +show'r pg100.txt +show'rs, pg100.txt +show'rs. pg100.txt +show'rs; pg100.txt +show'st, pg100.txt +show, pg3200.txt, pg100.txt +show- pg100.txt +show--" pg3200.txt +show-case, pg3200.txt +show-peaks pg3200.txt +show-woman pg3200.txt +show. pg3200.txt, pg100.txt +show." pg3200.txt +show: pg3200.txt, pg100.txt +show; pg3200.txt, pg100.txt +show?" pg3200.txt +showed pg31100.txt, pg3200.txt +showed, pg3200.txt +showed. pg31100.txt, pg3200.txt +showed; pg3200.txt +shower pg3200.txt +shower!--we pg31100.txt +shower-bath. pg3200.txt +shower. pg3200.txt +showered pg3200.txt +showering," pg3200.txt +showers pg3200.txt, pg100.txt +showers, pg100.txt +showers. pg100.txt +showest, pg100.txt +showeth pg3200.txt +showily pg3200.txt +showing pg31100.txt, pg3200.txt +showing, pg100.txt +showing." pg3200.txt +showing; pg3200.txt +showman pg3200.txt +shown pg31100.txt, pg3200.txt, pg100.txt +shown, pg31100.txt, pg3200.txt, pg100.txt +shown. pg3200.txt, pg100.txt +shown: pg100.txt +shown; pg100.txt +shows pg31100.txt, pg3200.txt, pg100.txt +shows, pg3200.txt, pg100.txt +shows--the pg3200.txt +shows. pg31100.txt, pg3200.txt, pg100.txt +shows; pg100.txt +showy pg3200.txt +showy. pg3200.txt +shrank pg3200.txt +shred pg3200.txt +shredding pg3200.txt +shreds pg3200.txt, pg100.txt +shreds, pg3200.txt +shreds. pg3200.txt +shrew pg100.txt +shrew, pg100.txt +shrew. pg100.txt +shrew." pg3200.txt +shrewd pg3200.txt, pg100.txt +shrewd, pg3200.txt +shrewd. pg100.txt +shrewd; pg100.txt +shrewdest pg3200.txt +shrewdly pg3200.txt, pg100.txt +shrewdly. pg100.txt +shrewdness pg31100.txt +shrewishly; pg100.txt +shrewishness; pg100.txt +shrewsbury pg100.txt +shrewsbury, pg100.txt +shrewsbury. pg100.txt +shrewsbury; pg100.txt +shrewsbury? pg100.txt +shriek pg3200.txt +shriek! pg3200.txt +shriek-- pg3200.txt +shriek--for pg3200.txt +shriek--now pg3200.txt +shriek; pg3200.txt, pg100.txt +shrieked pg3200.txt +shrieked, pg3200.txt +shrieking pg3200.txt +shrieking. pg3200.txt +shrieks, pg100.txt +shrieks. pg3200.txt +shrift. pg100.txt +shrift; pg100.txt +shrimp pg100.txt +shrimp, pg100.txt +shrine pg3200.txt, pg100.txt +shrine, pg3200.txt +shrine. pg3200.txt +shrine? pg100.txt +shrines pg3200.txt +shrines. pg3200.txt +shrink pg31100.txt, pg3200.txt +shrink, pg3200.txt +shrink. pg31100.txt, pg100.txt +shrinkage. pg3200.txt +shrinking pg31100.txt +shrinking. pg100.txt +shrivelled pg3200.txt +shropshire. pg31100.txt +shroud pg3200.txt, pg100.txt +shroud, pg100.txt +shroud- pg100.txt +shroud. pg100.txt +shroud., pg100.txt +shrouded pg3200.txt +shrouds pg3200.txt +shrove-tide. pg100.txt +shrow. pg100.txt +shrows! pg100.txt +shrub pg3200.txt, pg100.txt +shrubberies, pg31100.txt +shrubberies--always pg31100.txt +shrubbery pg31100.txt, pg3200.txt +shrubbery, pg31100.txt +shrubs pg3200.txt +shrubs, pg3200.txt +shrubs. pg3200.txt +shrubs; pg3200.txt +shrug, pg100.txt +shrug.] 
pg3200.txt +shrugging pg31100.txt +shrunk pg3200.txt +shrunk! pg100.txt +shrunk-up pg3200.txt +shucked. pg3200.txt +shucks pg3200.txt +shucks! pg3200.txt +shucks." pg3200.txt +shudder pg3200.txt +shudder, pg3200.txt +shudder. pg3200.txt +shudder: pg3200.txt +shudder; pg3200.txt +shuddered pg3200.txt +shuddered, pg3200.txt +shuddered. pg3200.txt +shuddered; pg3200.txt +shuddering pg3200.txt +shuddering, pg3200.txt +shudderings. pg3200.txt +shudders pg3200.txt +shuffle pg3200.txt +shuffled pg3200.txt +shuffling pg31100.txt +shuffling. pg3200.txt +shun pg100.txt +shun, pg100.txt +shunem pg3200.txt +shunem. pg3200.txt +shunn'd pg100.txt +shunn'd, pg100.txt +shunned pg3200.txt +shunning. pg100.txt +shut pg31100.txt, pg3200.txt, pg100.txt +shut, pg31100.txt, pg3200.txt, pg100.txt +shut--we pg3200.txt +shut. pg3200.txt, pg100.txt +shut.' pg3200.txt +shut; pg3200.txt +shuts pg3200.txt +shutter, pg31100.txt +shutter. pg3200.txt +shutter; pg3200.txt +shutters pg3200.txt +shutters; pg3200.txt +shutting pg3200.txt +shuttlecock. pg3200.txt +shy pg3200.txt +shy, pg31100.txt, pg3200.txt +shy. pg3200.txt +shy." pg31100.txt, pg3200.txt +shying pg3200.txt +shylock pg100.txt +shylock. pg100.txt +shylock? pg100.txt +shyly: pg3200.txt +shyness pg31100.txt +shyness; pg31100.txt +si pg3200.txt +si!" pg3200.txt +si----" pg3200.txt +si." pg3200.txt +si?" pg3200.txt +siam pg3200.txt +siam, pg3200.txt +siam. pg3200.txt +siam." pg3200.txt +siberia pg3200.txt +siberia--guide-books pg3200.txt +siberia. pg3200.txt +siberia." pg3200.txt +siberian pg3200.txt +sich pg3200.txt +sich. pg3200.txt +sicil, pg100.txt +sicilia pg100.txt +sicilia, pg100.txt +sicilia. pg100.txt +sicilia? pg100.txt +sicilius pg100.txt +sicily pg3200.txt, pg100.txt +sicily, pg100.txt +sicinius pg100.txt +sicinius! pg100.txt +sicinius. pg100.txt +sick pg31100.txt, pg3200.txt, pg100.txt +sick! pg100.txt +sick!" pg3200.txt +sick, pg3200.txt, pg100.txt +sick--and pg3200.txt +sick--three pg3200.txt +sick-chamber pg31100.txt +sick-chamber. pg3200.txt +sick-nurse: pg3200.txt +sick-room pg3200.txt +sick-room. pg3200.txt +sick. pg3200.txt, pg100.txt +sick." pg3200.txt +sick: pg3200.txt +sick; pg3200.txt, pg100.txt +sick? pg100.txt +sick?" pg3200.txt +sicken pg3200.txt +sicken. pg100.txt +sickening pg3200.txt +sickens pg3200.txt +sicker pg3200.txt +sicker." pg3200.txt +sickest pg3200.txt +sickly pg31100.txt, pg3200.txt, pg100.txt +sickly, pg31100.txt, pg3200.txt +sickly; pg31100.txt +sickness pg31100.txt, pg3200.txt, pg100.txt +sickness, pg100.txt +sickness- pg100.txt +sickness. pg3200.txt, pg100.txt +sickness? pg100.txt +sicknesses pg3200.txt +sickroom pg31100.txt +sicyon. pg100.txt +sid pg3200.txt +sid, pg3200.txt +sid. pg3200.txt +sid." pg3200.txt +sid; pg3200.txt +sid?" pg3200.txt +side pg31100.txt, pg3200.txt, pg100.txt +side! pg100.txt +side!--as pg31100.txt +side" pg3200.txt +side), pg3200.txt +side, pg31100.txt, pg3200.txt, pg100.txt +side," pg31100.txt +side- pg3200.txt +side--" pg3200.txt +side--but pg31100.txt +side-arms--but pg3200.txt +side-glance pg3200.txt +side-head, pg3200.txt +side-lights pg3200.txt +side-remarks pg3200.txt +side-saddles. pg3200.txt +side-tracked pg3200.txt +side-tracked. pg3200.txt +side-walks pg3200.txt +side-whiskers pg3200.txt +side. pg31100.txt, pg3200.txt, pg100.txt +side." pg31100.txt, pg3200.txt +side.' pg3200.txt +side.--"i pg31100.txt +side.--well, pg3200.txt +side: pg31100.txt +side; pg31100.txt, pg3200.txt, pg100.txt +side? pg3200.txt, pg100.txt +side?- pg100.txt +sideboard pg31100.txt, pg3200.txt +sideboard. 
pg3200.txt +sided pg3200.txt, pg100.txt +sidelhorn. pg3200.txt +sides pg3200.txt, pg100.txt +sides! pg100.txt +sides, pg31100.txt, pg3200.txt, pg100.txt +sides,) pg3200.txt +sides--low pg3200.txt +sides. pg3200.txt, pg100.txt +sides; pg31100.txt, pg3200.txt +sidewalk pg3200.txt +sidewalk, pg3200.txt +sidewalk. pg3200.txt +sidewalks pg3200.txt +sidewalks, pg3200.txt +sideways, pg3200.txt +sidewise. pg3200.txt +sidled pg3200.txt +sie pg3200.txt +sie-- pg3200.txt +siege pg3200.txt, pg100.txt +siege! pg100.txt +siege--" pg3200.txt +siege--and pg3200.txt +siege. pg3200.txt, pg100.txt +siege: pg3200.txt +siege; pg100.txt +sieges pg3200.txt +sieges, pg3200.txt +sierras pg3200.txt +sieur pg3200.txt +sieve pg100.txt +sieve, pg100.txt +sifted pg3200.txt +sifted, pg3200.txt, pg100.txt +sifted. pg3200.txt +sigh pg31100.txt, pg3200.txt, pg100.txt +sigh'd pg100.txt +sigh, pg31100.txt, pg3200.txt, pg100.txt +sigh-- pg31100.txt, pg3200.txt +sigh--"but pg3200.txt +sigh. pg31100.txt, pg3200.txt, pg100.txt +sigh.] pg3200.txt +sigh: pg31100.txt, pg3200.txt +sigh; pg100.txt +sigh?' pg3200.txt +sighed pg31100.txt, pg3200.txt +sighed, pg3200.txt +sighed." pg3200.txt +sighing pg31100.txt, pg100.txt +sighing, pg3200.txt +sighing: pg3200.txt +sighings pg3200.txt +sighs pg31100.txt, pg3200.txt, pg100.txt +sighs, pg3200.txt, pg100.txt +sighs. pg3200.txt, pg100.txt +sighs; pg100.txt +sight pg31100.txt, pg3200.txt, pg100.txt +sight! pg3200.txt, pg100.txt +sight!" pg3200.txt +sight!- pg100.txt +sight" pg31100.txt +sight, pg31100.txt, pg3200.txt, pg100.txt +sight," pg3200.txt +sight- pg100.txt +sight--it's pg3200.txt +sight--sign pg3200.txt +sight--the pg3200.txt +sight-seeing. pg3200.txt +sight-surfeited pg3200.txt +sight. pg31100.txt, pg3200.txt, pg100.txt +sight." pg31100.txt, pg3200.txt +sight._"] pg31100.txt +sight: pg100.txt +sight; pg3200.txt, pg100.txt +sight? pg100.txt +sight?' pg100.txt +sighting pg3200.txt +sightly pg3200.txt +sights pg31100.txt, pg3200.txt, pg100.txt +sights, pg3200.txt, pg100.txt +sights--as pg3200.txt +sights. pg3200.txt, pg100.txt +sights; pg100.txt +sightseeing pg3200.txt +sigismund-siegfriend-lauenfeld-dinkelspiel-schwartzenberg pg3200.txt +sign pg31100.txt, pg3200.txt, pg100.txt +sign, pg3200.txt, pg100.txt +sign--but pg3200.txt +sign--curious pg3200.txt +sign--if pg3200.txt +sign-language, pg3200.txt +sign-language-hardest-worked pg3200.txt +sign-painter's pg3200.txt +sign-painter, pg3200.txt +sign. pg3200.txt, pg100.txt +sign." pg3200.txt +sign; pg100.txt +sign?" pg3200.txt +signal pg31100.txt, pg3200.txt +signal, pg3200.txt +signal-station. pg3200.txt +signal. pg3200.txt, pg100.txt +signal." pg3200.txt +signature pg3200.txt +signature, pg3200.txt +signature. pg3200.txt +signature." pg3200.txt +signature.] pg3200.txt +signatures pg3200.txt +signatures? pg3200.txt +signed pg3200.txt +signed--what? pg3200.txt +signed." pg31100.txt +signet pg100.txt +significance pg3200.txt +significance, pg3200.txt +significance. pg3200.txt +significant pg31100.txt +significant. pg3200.txt +significant." pg3200.txt +significantly pg3200.txt +significantly.) pg3200.txt +signified pg3200.txt, pg100.txt +signified. pg100.txt +signifies pg3200.txt +signifies. pg3200.txt +signify pg31100.txt, pg3200.txt, pg100.txt +signify, pg31100.txt +signify. pg31100.txt, pg3200.txt, pg100.txt +signify." pg31100.txt, pg3200.txt +signify; pg31100.txt +signify?' pg3200.txt +signifying pg3200.txt +signing pg3200.txt +signior pg100.txt +signior, pg100.txt +signior. pg100.txt +signior; pg100.txt +signior? 
pg100.txt +signiories, pg100.txt +signiors, pg100.txt +signiory, pg100.txt +signories, pg100.txt +signs pg31100.txt, pg3200.txt, pg100.txt +signs! pg100.txt +signs, pg3200.txt, pg100.txt +signs. pg3200.txt, pg100.txt +signs." pg3200.txt +signs: pg3200.txt +signs; pg3200.txt, pg100.txt +signs? pg100.txt +signum! pg100.txt +silas pg3200.txt +silas's pg3200.txt +silas, pg3200.txt +silenc'd pg100.txt +silenc'd? pg100.txt +silence pg31100.txt, pg3200.txt, pg100.txt +silence! pg100.txt +silence, pg31100.txt, pg3200.txt, pg100.txt +silence," pg3200.txt +silence-- pg31100.txt, pg3200.txt +silence--a pg3200.txt +silence. pg31100.txt, pg3200.txt, pg100.txt +silence." pg3200.txt +silence.) pg3200.txt +silence: pg3200.txt +silence; pg31100.txt, pg3200.txt, pg100.txt +silence? pg100.txt +silenced pg31100.txt +silenced, pg31100.txt, pg3200.txt +silenced. pg31100.txt +silences pg3200.txt +silent pg31100.txt, pg3200.txt, pg100.txt +silent! pg3200.txt +silent, pg31100.txt, pg3200.txt, pg100.txt +silent-- pg3200.txt +silent. pg31100.txt, pg3200.txt, pg100.txt +silent." pg31100.txt, pg3200.txt +silent? pg100.txt +silent] pg100.txt +silently pg31100.txt, pg3200.txt +silhouette pg3200.txt +silius pg100.txt +silius, pg100.txt +silius: pg100.txt +silk pg3200.txt, pg100.txt +silk, pg100.txt +silk--" pg3200.txt +silk-and-velvet pg3200.txt +silk-spinners pg3200.txt +silk. pg100.txt +silk." pg3200.txt +silk: pg100.txt +silken pg3200.txt +silks pg3200.txt +silks, pg3200.txt, pg100.txt +silky pg3200.txt +silliest pg3200.txt +silly pg31100.txt, pg3200.txt, pg100.txt +silly! pg31100.txt, pg3200.txt +silly, pg31100.txt, pg3200.txt +silly. pg31100.txt +silly." pg31100.txt +silurians pg3200.txt +silva pg100.txt +silver pg31100.txt, pg3200.txt, pg100.txt +silver! pg100.txt +silver'd, pg100.txt +silver'd. pg100.txt +silver's pg3200.txt +silver), pg3200.txt +silver, pg3200.txt, pg100.txt +silver--at pg3200.txt +silver--hey?" pg3200.txt +silver--to pg3200.txt +silver-bowed pg3200.txt +silver-bright pg100.txt +silver-gilt pg3200.txt +silver-headed pg3200.txt +silver-mill, pg3200.txt +silver-mines pg3200.txt +silver-mining pg3200.txt +silver-mining--threw pg3200.txt +silver-mining. pg3200.txt +silver-mounted pg3200.txt +silver-tipped pg3200.txt +silver-white pg100.txt +silver. pg3200.txt, pg100.txt +silver." pg3200.txt +silver; pg3200.txt +silvered pg3200.txt +silverite pg3200.txt +silverware, pg3200.txt +silvia pg100.txt +silvia! pg100.txt +silvia'! pg100.txt +silvia- pg100.txt +silvia. pg100.txt +silvia: pg100.txt +silvia? pg100.txt +silvius pg100.txt +silvius. pg100.txt +silvius? pg100.txt +similar pg31100.txt, pg3200.txt +similar, pg3200.txt +similarity pg3200.txt +simile pg3200.txt +simile, pg3200.txt +similes. pg100.txt +simmons pg3200.txt +simmons," pg3200.txt +simmons--and pg3200.txt +simmons." pg3200.txt +simnel pg31100.txt +simnel. pg3200.txt +simois' pg100.txt +simon pg3200.txt +simon's pg3200.txt +simon-pure pg3200.txt +simon-pure, pg3200.txt +simonetti--a pg3200.txt +simpathetic pg31100.txt +simpathy pg31100.txt +simpcox pg100.txt +simpering pg31100.txt, pg3200.txt +simple pg31100.txt, pg3200.txt, pg100.txt +simple! pg100.txt +simple, pg31100.txt, pg3200.txt +simple-- pg3200.txt +simple-hearted pg3200.txt +simple-minded pg31100.txt +simple. pg3200.txt +simple." pg3200.txt +simple: pg3200.txt +simple; pg3200.txt +simple? pg3200.txt +simpleminded pg3200.txt +simpleness. pg100.txt +simpler pg3200.txt +simplest pg31100.txt, pg3200.txt +simplest. pg3200.txt +simpleton pg31100.txt +simpleton!" 
pg3200.txt +simpleton; pg3200.txt +simpleton? pg3200.txt +simpletons pg3200.txt +simplicity pg31100.txt, pg3200.txt, pg100.txt +simplicity, pg31100.txt, pg3200.txt, pg100.txt +simplicity," pg3200.txt +simplicity-- pg3200.txt +simplicity. pg3200.txt, pg100.txt +simplicity." pg3200.txt +simplicity: pg3200.txt +simplicity; pg100.txt +simplicity?' pg3200.txt +simplified pg3200.txt +simplify pg3200.txt +simplifying.--none pg3200.txt +simply pg31100.txt, pg3200.txt, pg100.txt +simply, pg3200.txt +simply: pg3200.txt +simply; pg100.txt +simultane. pg3200.txt +simultaneously pg3200.txt +simultaneously, pg3200.txt +simultaneously. pg3200.txt +sin pg31100.txt, pg3200.txt, pg100.txt +sin! pg100.txt +sin" pg3200.txt +sin's pg3200.txt +sin, pg31100.txt, pg3200.txt, pg100.txt +sin--but pg3200.txt +sin. pg3200.txt, pg100.txt +sin." pg3200.txt +sin; pg3200.txt, pg100.txt +sin? pg3200.txt +sin?" pg3200.txt +sinai pg3200.txt +sinai. pg3200.txt +since pg31100.txt, pg3200.txt, pg100.txt +since". pg31100.txt +since, pg31100.txt, pg3200.txt, pg100.txt +since--" pg3200.txt +since--er--since pg3200.txt +since--yes, pg3200.txt +since. pg3200.txt, pg100.txt +since." pg31100.txt, pg3200.txt +since: pg3200.txt +since; pg31100.txt, pg3200.txt +since? pg3200.txt, pg100.txt +since?" pg3200.txt +sincere pg31100.txt, pg3200.txt +sincere, pg31100.txt, pg3200.txt +sincere. pg31100.txt, pg3200.txt +sincere; pg31100.txt +sincerely pg31100.txt, pg3200.txt +sincerely, pg31100.txt, pg3200.txt +sincerely. pg3200.txt, pg100.txt +sincerely." pg31100.txt +sincerely: pg3200.txt +sincerity pg31100.txt, pg3200.txt +sincerity, pg31100.txt, pg3200.txt, pg100.txt +sincerity. pg3200.txt +sincerity; pg31100.txt +sind pg3200.txt +sinecure, pg3200.txt +sinecure. pg3200.txt +sinew pg100.txt +sinews pg100.txt +sinews, pg100.txt +sinews; pg100.txt +sinful pg3200.txt +sinful, pg3200.txt +sinful. pg3200.txt +sing pg31100.txt, pg3200.txt, pg100.txt +sing! pg3200.txt, pg100.txt +sing, pg31100.txt, pg3200.txt, pg100.txt +sing-sing, pg3200.txt +sing. pg3200.txt, pg100.txt +sing: pg100.txt +sing; pg3200.txt, pg100.txt +sing? pg100.txt +sing?" pg31100.txt, pg3200.txt +sing] pg100.txt +singapore-penang,..............399 pg3200.txt +singe pg3200.txt +singeing pg3200.txt +singer pg3200.txt +singer's pg3200.txt +singer. pg3200.txt +singer?" pg3200.txt +singers pg3200.txt +singers--6. pg3200.txt +singing pg31100.txt, pg3200.txt, pg100.txt +singing, pg31100.txt, pg3200.txt +singing-- pg3200.txt +singing--roaring, pg3200.txt +singing. pg3200.txt, pg100.txt +singing.--there pg31100.txt +singing: pg3200.txt +singing; pg31100.txt, pg100.txt +single pg31100.txt, pg3200.txt, pg100.txt +single" pg3200.txt +single, pg31100.txt, pg100.txt +single-barreled pg3200.txt +single-barrelled pg3200.txt +single-handed, pg3200.txt +single-handed." pg3200.txt +single." pg31100.txt +single: pg100.txt +single?" pg31100.txt +singled pg3200.txt +singleness! pg100.txt +singling pg31100.txt +singly. pg100.txt +sings pg31100.txt, pg3200.txt, pg100.txt +sings! pg100.txt +sings, pg3200.txt, pg100.txt +sings. pg100.txt +sings.] pg100.txt +sings: pg3200.txt +singular pg31100.txt, pg3200.txt +singular. pg3200.txt, pg100.txt +singular." pg31100.txt, pg3200.txt +singularly pg31100.txt, pg3200.txt +singuled pg100.txt +sinister pg100.txt +sinister, pg100.txt +sinister. pg3200.txt +sinister?" pg3200.txt +sink pg31100.txt, pg3200.txt, pg100.txt +sink! pg100.txt +sink, pg3200.txt +sink-a-pace. pg100.txt +sink. pg100.txt +sink? 
pg100.txt +sinking pg3200.txt +sinking, pg100.txt +sinks pg3200.txt +sinless pg3200.txt +sinless; pg3200.txt +sinn'd pg100.txt +sinn'd, pg100.txt +sinned pg3200.txt +sinned." pg3200.txt +sinned; pg3200.txt +sinner!" pg3200.txt +sinner, pg3200.txt +sinner. pg3200.txt, pg100.txt +sinners pg3200.txt +sinners! pg100.txt +sinners--even pg3200.txt +sinners. pg3200.txt +sinning. pg3200.txt, pg100.txt +sins pg3200.txt, pg100.txt +sins! pg100.txt +sins!" pg3200.txt +sins, pg3200.txt, pg100.txt +sins. pg3200.txt, pg100.txt +sins." pg3200.txt +sins: pg100.txt +sins; pg3200.txt +sinuosities, pg3200.txt +sinuous pg3200.txt +sinuous. pg3200.txt +sioux, pg3200.txt +sipped pg3200.txt +sipping pg3200.txt +sipping, pg100.txt +sipping. pg3200.txt +sir pg31100.txt, pg3200.txt, pg100.txt +sir! pg3200.txt, pg100.txt +sir!" pg3200.txt +sir!' pg3200.txt, pg100.txt +sir"-- pg3200.txt +sir"---- pg3200.txt +sir, pg31100.txt, pg3200.txt, pg100.txt +sir," pg3200.txt +sir,' pg100.txt +sir,-- pg31100.txt +sir,--" pg3200.txt +sir- pg100.txt +sir--" pg3200.txt +sir--leave pg3200.txt +sir--or pg31100.txt +sir. pg31100.txt, pg3200.txt, pg100.txt +sir." pg31100.txt, pg3200.txt +sir.' pg3200.txt, pg100.txt +sir.- pg100.txt +sir: pg100.txt +sir; pg31100.txt, pg3200.txt, pg100.txt +sir? pg100.txt +sir?" pg31100.txt, pg3200.txt +sir?' pg3200.txt +sire pg3200.txt, pg100.txt +sire, pg100.txt +sire- pg100.txt +sire." pg3200.txt +sire: pg100.txt +sire; pg100.txt +sirloin pg3200.txt +sirrah! pg100.txt +sirrah, pg100.txt +sirrah. pg100.txt +sirrah; pg100.txt +sirrah? pg100.txt +sirs, pg100.txt +sirs. pg100.txt +sirs; pg100.txt +sis pg3200.txt +sis! pg3200.txt +sis." pg3200.txt +sister pg31100.txt, pg3200.txt, pg100.txt +sister! pg31100.txt +sister!" pg31100.txt +sister's pg31100.txt +sister's, pg31100.txt +sister's. pg31100.txt +sister's?" pg31100.txt +sister, pg31100.txt, pg3200.txt, pg100.txt +sister- pg100.txt +sister-in-law pg31100.txt +sister-in-law's pg31100.txt +sister-in-law. pg31100.txt +sister. pg31100.txt, pg3200.txt, pg100.txt +sister." pg31100.txt +sister.' pg3200.txt +sister; pg31100.txt, pg100.txt +sister? pg3200.txt, pg100.txt +sister?" pg31100.txt +sisterhood, pg100.txt +sisterly pg31100.txt +sisters pg31100.txt, pg3200.txt, pg100.txt +sisters! pg100.txt +sisters, pg31100.txt, pg3200.txt, pg100.txt +sisters. pg31100.txt, pg3200.txt, pg100.txt +sisters." pg31100.txt +sisters: pg31100.txt, pg100.txt +sisters; pg31100.txt, pg100.txt +sisters? pg31100.txt, pg3200.txt, pg100.txt +sit pg31100.txt, pg3200.txt, pg100.txt +sit, pg3200.txt, pg100.txt +sit. pg3200.txt, pg100.txt +sit; pg31100.txt, pg100.txt +sit? pg100.txt +sit?" pg3200.txt +sit] pg100.txt +site pg3200.txt +site; pg3200.txt +sites, pg3200.txt +sites." pg3200.txt +sith pg3200.txt, pg100.txt +sithence? pg100.txt +siting pg3200.txt +sits pg31100.txt, pg3200.txt, pg100.txt +sits, pg100.txt +sits- pg100.txt +sits. pg100.txt +sits] pg100.txt +sitting pg31100.txt, pg3200.txt, pg100.txt +sitting!' pg3200.txt +sitting, pg31100.txt, pg3200.txt, pg100.txt +sitting-room, pg3200.txt +sitting. pg31100.txt, pg3200.txt +sitting.--m.t. pg3200.txt +sitting; pg3200.txt +sittings, pg3200.txt +sittings. pg3200.txt +sittings?" pg3200.txt +situate pg3200.txt +situate. pg100.txt +situated pg31100.txt, pg3200.txt +situated, pg3200.txt +situated--another pg3200.txt +situated. pg31100.txt, pg3200.txt +situated." pg31100.txt +situated? 
pg3200.txt +situation pg31100.txt, pg3200.txt +situation, pg31100.txt, pg3200.txt +situation--alas, pg3200.txt +situation--elaborates, pg3200.txt +situation. pg31100.txt, pg3200.txt +situation." pg31100.txt, pg3200.txt +situation; pg3200.txt +situation? pg31100.txt, pg3200.txt +situation?' pg3200.txt +situations pg31100.txt, pg3200.txt +situations, pg31100.txt, pg3200.txt +situations; pg3200.txt +siward. pg100.txt +siward; pg100.txt +six pg31100.txt, pg3200.txt, pg100.txt +six, pg3200.txt +six--and pg3200.txt +six-and--' pg3200.txt +six-and-fifty, pg3200.txt +six-and-thirty pg31100.txt +six-or-seven-times-honour'd pg100.txt +six-shooter, pg3200.txt +six-twenty pg3200.txt +six. pg3200.txt, pg100.txt +six." pg3200.txt +six; pg3200.txt +sixpence pg3200.txt, pg100.txt +sixpence. pg3200.txt +sixpence?" pg3200.txt +sixpenny pg100.txt +sixteen pg31100.txt, pg3200.txt +sixteen! pg31100.txt +sixteen, pg31100.txt, pg3200.txt, pg100.txt +sixteen. pg3200.txt +sixteen; pg3200.txt +sixteenth pg3200.txt +sixth pg3200.txt, pg100.txt +sixth! pg100.txt +sixth, pg3200.txt +sixth. pg3200.txt +sixty pg3200.txt +sixty!" pg3200.txt +sixty, pg3200.txt, pg100.txt +sixty- pg3200.txt +sixty-five pg3200.txt +sixty-five. pg3200.txt +sixty-four pg3200.txt +sixty-four. pg3200.txt +sixty-nine. pg3200.txt +sixty-six pg3200.txt +sixty-two pg3200.txt +sixty. pg3200.txt +size pg31100.txt, pg3200.txt, pg100.txt +size, pg31100.txt, pg3200.txt +size. pg31100.txt, pg3200.txt, pg100.txt +size." pg3200.txt +size; pg31100.txt, pg3200.txt, pg100.txt +sizeable pg3200.txt +sizes pg3200.txt +sizes, pg3200.txt, pg100.txt +sizes. pg3200.txt +sizzled--for pg3200.txt +sk'ylarks!' pg3200.txt +skasely pg3200.txt +skasely." pg3200.txt +skating-rink. pg3200.txt +skeet pg3200.txt +skeins pg3200.txt +skeleton pg3200.txt +skeleton-plan pg3200.txt +skeleton. pg3200.txt +skeleton.' pg3200.txt +skeleton] pg3200.txt +skeletons pg3200.txt +skeletons. pg3200.txt +skepticism pg3200.txt +sketch pg31100.txt, pg3200.txt +sketch, pg31100.txt, pg3200.txt +sketch. pg31100.txt, pg3200.txt +sketch.] pg3200.txt +sketched pg3200.txt +sketches pg3200.txt +sketches" pg3200.txt +sketches, pg3200.txt +sketches. pg3200.txt +sketches.] pg3200.txt +sketches? pg3200.txt +sketching pg3200.txt +skibo. pg3200.txt +skies pg31100.txt, pg3200.txt, pg100.txt +skies, pg3200.txt +skies. pg3200.txt, pg100.txt +skies; pg3200.txt +skies? pg100.txt +skiff pg3200.txt +skiff, pg3200.txt +skiff-loads pg3200.txt +skiff. pg3200.txt +skiff." pg3200.txt +skiffs pg3200.txt +skilful pg3200.txt +skilfully. pg100.txt +skill pg31100.txt, pg3200.txt, pg100.txt +skill! pg100.txt +skill'd; pg100.txt +skill, pg100.txt +skill. pg3200.txt, pg100.txt +skilled, pg3200.txt +skillful pg100.txt +skills pg100.txt +skimmed pg3200.txt +skimming pg3200.txt +skimp pg3200.txt +skin pg31100.txt, pg3200.txt, pg100.txt +skin, pg31100.txt, pg3200.txt, pg100.txt +skin-diseases. pg3200.txt +skin. pg3200.txt, pg100.txt +skin: pg100.txt +skinflint." pg3200.txt +skinned pg3200.txt +skinned, pg3200.txt +skinned. pg3200.txt +skinned." pg3200.txt +skinners. pg31100.txt +skinning pg3200.txt +skins pg3200.txt, pg100.txt +skins, pg3200.txt +skins. pg3200.txt +skip pg3200.txt +skip, pg3200.txt +skip--still, pg3200.txt +skip. pg3200.txt +skipped. pg3200.txt +skipper's pg3200.txt +skipping pg3200.txt +skips pg3200.txt +skirmish, pg100.txt +skirmish. 
pg3200.txt +skirmish] pg100.txt +skirting pg3200.txt +skirts pg3200.txt, pg100.txt +skirts, pg3200.txt +skits pg3200.txt +skittish, pg3200.txt +skreeky pg3200.txt +skulking pg3200.txt +skulks pg3200.txt +skull pg3200.txt +skull, pg3200.txt +skull. pg3200.txt, pg100.txt +skull.] pg100.txt +skull]. pg100.txt +skulls. pg3200.txt, pg100.txt +skulls; pg3200.txt, pg100.txt +skunk!" pg3200.txt +skurrying pg3200.txt +sky pg3200.txt, pg100.txt +sky, pg31100.txt, pg3200.txt, pg100.txt +sky--you pg3200.txt +sky-blue pg3200.txt +sky-lights pg3200.txt +sky-line pg3200.txt +sky-scrapers!" pg3200.txt +sky. pg3200.txt, pg100.txt +sky." pg3200.txt +sky: pg3200.txt, pg100.txt +sky; pg3200.txt +sky? pg3200.txt +skylight pg3200.txt +skylight, pg3200.txt +skylight. pg3200.txt +skyrocket, pg3200.txt +skyward pg3200.txt +skywards. pg3200.txt +slab pg3200.txt +slab. pg100.txt +slabs pg3200.txt +slack pg3200.txt, pg100.txt +slack. pg3200.txt +slacken pg3200.txt +slackened, pg3200.txt +slade pg3200.txt +slade! pg3200.txt +slade's pg3200.txt +slade, pg3200.txt +slade--lamentations pg3200.txt +slade. pg3200.txt +slain pg100.txt +slain! pg100.txt +slain, pg3200.txt, pg100.txt +slain. pg100.txt +slain.' pg100.txt +slain; pg3200.txt, pg100.txt +slain? pg100.txt +slam pg31100.txt, pg3200.txt +slam-bang pg3200.txt +sland'red pg100.txt +slander pg3200.txt, pg100.txt +slander, pg100.txt +slander-] pg100.txt +slander. pg3200.txt, pg100.txt +slandered pg3200.txt +slanderer! pg100.txt +slanderers. pg100.txt +slanderous pg31100.txt +slanders pg3200.txt, pg100.txt +slanders?" pg3200.txt +slang pg3200.txt +slant pg3200.txt +slant, pg3200.txt +slanted pg3200.txt +slanting pg3200.txt +slap pg3200.txt +slapped pg3200.txt +slapping pg3200.txt +slaps pg3200.txt +slash pg3200.txt +slash, pg100.txt +slashed pg3200.txt +slate?" pg3200.txt +slates pg3200.txt +slatternly, pg31100.txt +slaught'red, pg100.txt +slaughter pg3200.txt, pg100.txt +slaughter! pg3200.txt +slaughter!" pg3200.txt +slaughter'd, pg100.txt +slaughter, pg100.txt +slaughter-house pg3200.txt +slaughter-house, pg100.txt +slaughter-house. pg100.txt +slaughter-house." pg3200.txt +slaughter-house; pg100.txt +slaughter-pen. pg3200.txt +slaughter. pg3200.txt, pg100.txt +slaughter; pg3200.txt, pg100.txt +slaughter? pg100.txt +slaughtered pg3200.txt +slaughterer pg100.txt +slaughtering pg3200.txt +slaughterman, pg100.txt +slaughtermen. pg100.txt +slaughters; pg100.txt +slav' pg3200.txt +slave pg31100.txt, pg3200.txt, pg100.txt +slave! pg3200.txt, pg100.txt +slave, pg31100.txt, pg3200.txt, pg100.txt +slave- pg100.txt +slave--en pg3200.txt +slave-chain, pg3200.txt +slave-lord, pg3200.txt +slave-lords pg3200.txt +slave-quarters pg3200.txt +slave-tyrants!"--which pg3200.txt +slave. pg3200.txt, pg100.txt +slave." pg3200.txt +slave.' pg100.txt +slave; pg3200.txt +slave? pg100.txt +slaveholder's pg3200.txt +slaveholders pg3200.txt +slaveries, pg3200.txt +slavery pg3200.txt, pg100.txt +slavery, pg3200.txt +slavery. pg3200.txt, pg100.txt +slavery? pg3200.txt +slaves pg3200.txt, pg100.txt +slaves! pg3200.txt, pg100.txt +slaves, pg3200.txt, pg100.txt +slaves. pg3200.txt, pg100.txt +slaves." pg3200.txt +slaves; pg3200.txt, pg100.txt +slaving, pg3200.txt +slay pg3200.txt, pg100.txt +slay, pg3200.txt +slayer. pg3200.txt +slays pg100.txt +sledge pg3200.txt +sledge" pg3200.txt +sledgehammer. pg3200.txt +sledges. pg3200.txt +sleek pg3200.txt +sleeman pg3200.txt +sleep pg31100.txt, pg3200.txt, pg100.txt +sleep! 
pg100.txt +sleep!--for pg3200.txt +sleep'st; pg100.txt +sleep, pg31100.txt, pg3200.txt, pg100.txt +sleep- pg100.txt +sleep--all pg3200.txt +sleep. pg31100.txt, pg3200.txt, pg100.txt +sleep." pg3200.txt +sleep.' pg3200.txt, pg100.txt +sleep: pg100.txt +sleep; pg31100.txt, pg3200.txt, pg100.txt +sleep? pg3200.txt, pg100.txt +sleep?" pg3200.txt +sleep] pg100.txt +sleeper pg3200.txt +sleeper. pg31100.txt +sleeper?" pg3200.txt +sleepers pg3200.txt, pg100.txt +sleepers. pg3200.txt +sleepest. pg100.txt +sleepiest pg3200.txt +sleepily pg3200.txt +sleepily-- pg3200.txt +sleepiness pg3200.txt +sleeping pg31100.txt, pg3200.txt, pg100.txt +sleeping, pg3200.txt +sleeping- pg100.txt +sleeping-benches pg3200.txt +sleeping-cars pg3200.txt +sleeping-place pg3200.txt +sleeping-room pg31100.txt +sleeping. pg3200.txt, pg100.txt +sleepless pg31100.txt +sleepless, pg31100.txt +sleeps pg3200.txt, pg100.txt +sleeps--sleeps pg3200.txt +sleeps. pg3200.txt, pg100.txt +sleeps? pg100.txt +sleeps] pg100.txt +sleepy pg100.txt +sleepy, pg100.txt +sleepy--if pg3200.txt +sleepy. pg3200.txt, pg100.txt +sleeting, pg3200.txt +sleeve pg3200.txt, pg100.txt +sleeve! pg3200.txt, pg100.txt +sleeve'- pg100.txt +sleeve, pg3200.txt, pg100.txt +sleeve--jist pg3200.txt +sleeve. pg3200.txt, pg100.txt +sleeve; pg100.txt +sleeves pg3200.txt +sleeves, pg3200.txt +sleeves. pg3200.txt, pg100.txt +sleeves." pg31100.txt +sleeves; pg3200.txt +sleeves? pg100.txt +sleigh pg3200.txt +sleigh. pg3200.txt +sleights pg100.txt +slender pg3200.txt, pg100.txt +slender, pg100.txt +slender- pg100.txt +slender-limbed, pg3200.txt +slender. pg3200.txt, pg100.txt +slender; pg3200.txt +slender? pg100.txt +slender?" pg3200.txt +slenderer. pg100.txt +slenderly pg100.txt +slept pg3200.txt, pg100.txt +slept, pg3200.txt, pg100.txt +slept--those pg3200.txt +slept. pg3200.txt, pg100.txt +slept." pg3200.txt +slept; pg3200.txt, pg100.txt +slept? pg100.txt +slew pg3200.txt, pg100.txt +slew'st pg100.txt +slew, pg100.txt +slew. pg100.txt +slewed pg3200.txt +slice pg31100.txt, pg3200.txt +sliced pg3200.txt +slices pg3200.txt +slick pg3200.txt +slick, pg3200.txt +slid pg3200.txt +slide pg3200.txt, pg100.txt +slide!" pg3200.txt +slide. pg100.txt +sliding pg3200.txt +slight pg31100.txt, pg3200.txt, pg100.txt +slight, pg31100.txt, pg3200.txt +slight. pg3200.txt, pg100.txt +slight." pg31100.txt +slight? pg100.txt +slighted pg31100.txt +slighted, pg31100.txt, pg100.txt +slightest pg31100.txt, pg3200.txt +slighting pg31100.txt +slightingly pg31100.txt +slightly pg3200.txt, pg100.txt +slightly, pg31100.txt, pg3200.txt, pg100.txt +slightly; pg3200.txt +slights, pg3200.txt +slights; pg3200.txt +slim pg3200.txt +slim, pg3200.txt +slime pg100.txt +slime, pg3200.txt +slimy; pg3200.txt +sling pg3200.txt +slings; pg100.txt +slip pg3200.txt, pg100.txt +slip, pg3200.txt +slip-shod. pg100.txt +slip. pg100.txt +slip; pg100.txt +slipp'd pg100.txt +slipp'ry pg100.txt +slipped pg3200.txt +slipped. pg31100.txt +slipper. pg100.txt +slippers pg3200.txt +slippers!" pg3200.txt +slippers, pg31100.txt, pg3200.txt +slippers--oh, pg3200.txt +slippery pg3200.txt +slipping pg3200.txt +slips pg100.txt +slips, pg100.txt +slips- pg100.txt +slipshod pg3200.txt +slit pg3200.txt +slits pg3200.txt +slivers pg3200.txt +sloop pg31100.txt +sloop. pg31100.txt +slop, pg3200.txt +slop-shop pg3200.txt +slop-shops. pg3200.txt +slop-tub. pg3200.txt +slop. pg100.txt +slope pg3200.txt, pg100.txt +slope, pg31100.txt +slope,' pg3200.txt +slope. pg3200.txt +slopes pg3200.txt +slopes. 
pg3200.txt +sloping pg31100.txt, pg3200.txt +slopjar?" pg3200.txt +slops. pg3200.txt +slops? pg100.txt +sloshing pg3200.txt +sloth pg3200.txt, pg100.txt +sloth. pg100.txt +slouch pg3200.txt +slouch. pg3200.txt +slouchburg pg3200.txt +slouchburg: pg3200.txt +slouched pg3200.txt +slough. pg3200.txt +sloughs pg3200.txt +slovenly pg3200.txt +slovenry. pg100.txt +slow pg31100.txt, pg3200.txt, pg100.txt +slow! pg3200.txt +slow'd.- pg100.txt +slow, pg3200.txt, pg100.txt +slow-consuming pg3200.txt +slow-moving pg3200.txt +slow. pg31100.txt, pg100.txt +slow." pg3200.txt +slow: pg100.txt +slow; pg31100.txt, pg3200.txt +slow? pg100.txt +slow?" pg3200.txt +slow?--oh, pg3200.txt +slowed pg3200.txt +slower pg3200.txt +slowest pg3200.txt +slowly pg31100.txt, pg3200.txt +slowly, pg3200.txt +slowly--"so pg31100.txt +slowly--and pg31100.txt +slowly. pg3200.txt +slowly." pg31100.txt +slowly: pg3200.txt +slowly; pg3200.txt +slug-abed! pg100.txt +sluggish pg31100.txt, pg3200.txt +sluggish-brained pg3200.txt +slugs, pg3200.txt +slugs. pg3200.txt +sluiced pg3200.txt +slumb'red pg100.txt +slumber pg3200.txt, pg100.txt +slumber, pg100.txt +slumber. pg3200.txt +slumber; pg100.txt +slumbering pg3200.txt +slumberous pg3200.txt +slumbers pg3200.txt +slumbers. pg3200.txt, pg100.txt +slumbery pg100.txt +slumbrous pg3200.txt +slumped pg3200.txt +slumps pg3200.txt +slums pg3200.txt +slums, pg3200.txt +slung pg3200.txt +slunk pg3200.txt +slur pg3200.txt +slur, pg3200.txt +slur? pg3200.txt +slurs pg3200.txt +slush pg3200.txt +slush!--' pg3200.txt +slush, pg3200.txt +slush. pg3200.txt +slush?' pg3200.txt +slut pg100.txt +sluts, pg100.txt +sluttery. pg100.txt +sly pg31100.txt, pg100.txt +sly, pg3200.txt, pg100.txt +sly. pg3200.txt, pg100.txt +sly." pg3200.txt +sly; pg31100.txt +slyly pg31100.txt, pg3200.txt +slyly, pg3200.txt +smack pg3200.txt, pg100.txt +smack." pg3200.txt +small pg31100.txt, pg3200.txt, pg100.txt +small! pg3200.txt +small's pg3200.txt +small, pg31100.txt, pg3200.txt, pg100.txt +small--wants pg31100.txt +small-fry pg3200.txt +small-pored pg3200.txt +small-pox pg3200.txt +small-pox, pg31100.txt +small-souled pg3200.txt +small. pg3200.txt, pg100.txt +small." pg3200.txt +small; pg3200.txt +small?" pg31100.txt +smaller pg3200.txt +smallest pg31100.txt, pg3200.txt +smallest. pg100.txt +smallness pg31100.txt, pg3200.txt +smallpox! pg3200.txt +smallridge pg31100.txt +smallridge, pg31100.txt +smalus, pg100.txt +smart pg31100.txt, pg3200.txt, pg100.txt +smart, pg31100.txt, pg3200.txt, pg100.txt +smart--but pg31100.txt +smart. pg100.txt +smart." pg3200.txt +smart; pg31100.txt, pg100.txt +smarted pg3200.txt +smartened pg31100.txt +smartest pg3200.txt +smarties pg3200.txt +smartly pg3200.txt +smartness pg31100.txt +smartness. pg3200.txt +smarty?" pg3200.txt +smash pg3200.txt +smash!" pg3200.txt +smash. pg3200.txt +smashed pg3200.txt +smashed; pg3200.txt +smashing pg3200.txt +smashing, pg3200.txt +smatterer pg3200.txt +smatterings pg3200.txt +smear pg100.txt +smear'd pg100.txt +smeared pg3200.txt +smell pg3200.txt, pg100.txt +smell. pg3200.txt, pg100.txt +smelled pg3200.txt +smelling pg3200.txt +smelling-bottle--it pg3200.txt +smells pg3200.txt, pg100.txt +smells, pg3200.txt, pg100.txt +smells. pg3200.txt +smelt pg3200.txt, pg100.txt +smelt. pg3200.txt +smil'd? pg100.txt +smil'st, pg100.txt +smile pg31100.txt, pg3200.txt, pg100.txt +smile! pg3200.txt, pg100.txt +smile!" pg3200.txt +smile) pg31100.txt +smile, pg31100.txt, pg3200.txt, pg100.txt +smile. pg31100.txt, pg3200.txt, pg100.txt +smile." 
pg31100.txt, pg3200.txt +smile.) pg31100.txt +smile: pg31100.txt, pg3200.txt +smile; pg31100.txt, pg3200.txt, pg100.txt +smiled pg31100.txt, pg3200.txt +smiled, pg31100.txt, pg3200.txt +smiled. pg31100.txt, pg3200.txt +smiled; pg31100.txt +smiles pg31100.txt, pg3200.txt, pg100.txt +smiles, pg31100.txt, pg3200.txt, pg100.txt +smiles. pg31100.txt, pg3200.txt, pg100.txt +smiles: pg3200.txt +smiles; pg3200.txt, pg100.txt +smilets pg100.txt +smiley pg3200.txt +smiley's pg3200.txt +smiley, pg3200.txt +smiley: pg3200.txt +smiling pg31100.txt, pg3200.txt, pg100.txt +smiling, pg31100.txt, pg3200.txt, pg100.txt +smiling--"the pg31100.txt +smiling--see!" pg3200.txt +smiling. pg31100.txt +smiling; pg31100.txt, pg3200.txt, pg100.txt +smilingly. pg31100.txt, pg100.txt +smirk. pg3200.txt +smirking pg3200.txt +smite! pg100.txt +smites pg100.txt +smiteth pg3200.txt +smith pg31100.txt, pg3200.txt, pg100.txt +smith!--noble pg3200.txt +smith's pg31100.txt +smith's." pg31100.txt +smith, pg31100.txt, pg3200.txt +smith,) pg3200.txt +smith. pg31100.txt, pg3200.txt, pg100.txt +smith." pg31100.txt +smith: pg3200.txt +smith; pg31100.txt +smith?" pg31100.txt, pg3200.txt +smith?--that pg31100.txt +smithfield pg100.txt +smithfield. pg100.txt +smithianus--said, pg3200.txt +smiths pg31100.txt, pg3200.txt +smithsonian pg3200.txt +smitten pg3200.txt +smock, pg100.txt +smock. pg100.txt +smock; pg100.txt +smocks; pg100.txt +smoke pg3200.txt +smoke! pg3200.txt +smoke, pg3200.txt, pg100.txt +smoke-blackened pg3200.txt +smoke-cloud, pg3200.txt +smoke-clouds pg3200.txt +smoke-house." pg3200.txt +smoke-stack pg3200.txt +smoke. pg3200.txt +smoke." pg3200.txt +smoke.' pg3200.txt +smoke; pg3200.txt +smoke? pg3200.txt, pg100.txt +smoked pg3200.txt +smoked, pg3200.txt +smokeless pg3200.txt +smoker's pg3200.txt +smoker; pg3200.txt +smokes pg3200.txt +smokes. pg100.txt +smokestacks, pg3200.txt +smoking pg3200.txt +smoking, pg3200.txt +smoking--remittance-men. pg3200.txt +smoking-room pg3200.txt +smoking. pg3200.txt +smoking; pg3200.txt +smokings pg3200.txt +smolder pg3200.txt +smoldering pg3200.txt +smollet pg3200.txt +smooth pg31100.txt, pg3200.txt, pg100.txt +smooth, pg3200.txt, pg100.txt +smooth-- pg31100.txt +smooth-bore pg3200.txt +smooth-shaven. pg3200.txt +smooth-voiced. pg3200.txt +smooth. pg3200.txt +smooth; pg100.txt +smoothed pg3200.txt +smoothed; pg3200.txt +smoother pg3200.txt +smoothing pg3200.txt +smoothly pg31100.txt, pg3200.txt, pg100.txt +smoothly, pg31100.txt +smoothly. pg3200.txt +smoothly: pg3200.txt +smoothly; pg31100.txt, pg3200.txt +smoothness pg31100.txt, pg3200.txt +smoothness, pg100.txt +smoothness. pg3200.txt +smote pg3200.txt, pg100.txt +smother pg3200.txt, pg100.txt +smother'd. pg100.txt +smother; pg100.txt +smothered pg3200.txt, pg100.txt +smothered, pg3200.txt +smothering pg3200.txt +smothering. pg3200.txt +smothery, pg3200.txt +smouch pg3200.txt +smouched pg3200.txt +smug, pg3200.txt +smug. pg3200.txt +smuggle pg3200.txt +smuggle; pg3200.txt +smuggled pg3200.txt +smuggling--lots pg3200.txt +smyrna pg3200.txt +smyrna, pg3200.txt +smyrna. pg3200.txt +smyrna." pg3200.txt +smyrnas pg3200.txt +smyrniote, pg3200.txt +smythe pg3200.txt +smythe, pg3200.txt +smythe. pg31100.txt +snack"--(sellers pg3200.txt +snaffle pg100.txt +snag pg3200.txt +snags pg3200.txt +snags, pg3200.txt +snail pg100.txt +snail! pg100.txt +snail-shell, pg3200.txt +snail. pg100.txt +snails: pg100.txt +snake pg3200.txt +snake!' pg100.txt +snake, pg3200.txt, pg100.txt +snake-skin pg3200.txt +snake. 
pg100.txt +snake; pg100.txt +snaked pg3200.txt +snakes pg3200.txt +snakes, pg3200.txt, pg100.txt +snakes. pg3200.txt +snap pg3200.txt, pg100.txt +snap, pg3200.txt +snap. pg3200.txt, pg100.txt +snapped pg3200.txt +snapping, pg3200.txt +snappishly: pg3200.txt +snaps.] pg3200.txt +snar'd, pg100.txt +snare pg100.txt +snare. pg3200.txt, pg100.txt +snare? pg100.txt +snares pg3200.txt +snarl: pg3200.txt +snatch pg3200.txt, pg100.txt +snatch'd. pg100.txt +snatch; pg100.txt +snatched pg3200.txt +snatches pg3200.txt +snatching pg3200.txt +sneak pg3200.txt +sneak. pg100.txt +sneaked pg3200.txt +sneakin' pg3200.txt +sneaking, pg3200.txt +sneaks pg3200.txt +sneaky. pg3200.txt +sneer pg3200.txt +sneer. pg31100.txt, pg3200.txt +sneer." pg3200.txt +sneered pg31100.txt +sneering pg31100.txt +sneers, pg31100.txt +sneeze. pg3200.txt +sneezing pg3200.txt +sneyd--you pg31100.txt +sniff, pg3200.txt +sniffed pg3200.txt +snip pg3200.txt +snipe pg100.txt +snobbery. pg3200.txt +snobs. pg3200.txt +snodgrass!" pg3200.txt +snodgrass--" pg3200.txt +snodgrass. pg3200.txt +snodgrass." pg3200.txt +snore pg3200.txt +snore. pg3200.txt +snored pg3200.txt +snores pg31100.txt +snores, pg100.txt +snores. pg100.txt +snoring pg3200.txt +snoring. pg3200.txt +snorings pg3200.txt +snort pg3200.txt +snort" pg3200.txt +snorting pg100.txt +snorting, pg3200.txt +snout pg3200.txt, pg100.txt +snow pg31100.txt, pg3200.txt, pg100.txt +snow, pg31100.txt, pg3200.txt, pg100.txt +snow- pg100.txt +snow-- pg3200.txt +snow--paris pg3200.txt +snow-blocks--look pg3200.txt +snow-clad pg3200.txt +snow-drifts pg3200.txt +snow-flakes! pg3200.txt +snow-line pg3200.txt +snow-mounds pg3200.txt +snow-mountains pg3200.txt +snow-peaks pg3200.txt +snow-pure, pg3200.txt +snow-shoe pg3200.txt +snow-storm pg3200.txt +snow-storm, pg3200.txt +snow-storm--and pg3200.txt +snow-storm. pg3200.txt +snow-summits pg3200.txt +snow-walled pg3200.txt +snow-white pg3200.txt, pg100.txt +snow. pg3200.txt, pg100.txt +snow." pg31100.txt +snow; pg100.txt +snow? pg100.txt +snowball. pg3200.txt +snowed pg3200.txt +snowflakes pg3200.txt +snowing pg31100.txt, pg3200.txt +snowing. pg3200.txt +snows pg3200.txt +snows, pg3200.txt +snows. pg3200.txt +snowstorm--private pg3200.txt +snowstorm. pg3200.txt +snowy pg3200.txt +snub pg3200.txt +snubbed pg3200.txt +snubbed, pg3200.txt +snubbed. pg3200.txt +snuff pg100.txt +snuff-bladder. pg3200.txt +snuff. pg100.txt +snuff; pg3200.txt, pg100.txt +snuff? pg100.txt +snuffed pg3200.txt +snuffling, pg3200.txt +snug pg31100.txt, pg3200.txt, pg100.txt +snug, pg31100.txt, pg100.txt +snuggery pg3200.txt +snuggled pg3200.txt +snugly pg3200.txt +so! pg31100.txt, pg3200.txt, pg100.txt +so!" pg31100.txt, pg3200.txt +so!") pg3200.txt +so!"-- pg31100.txt +so!' pg100.txt +so!- pg100.txt +so';- pg100.txt +so's pg3200.txt +so) pg3200.txt +so, pg31100.txt, pg3200.txt, pg100.txt +so," pg3200.txt +so,' pg100.txt +so,--" pg3200.txt +so- pg3200.txt, pg100.txt +so-- pg3200.txt +so--" pg3200.txt +so----" pg3200.txt +so--and pg3200.txt +so--as pg3200.txt +so--but pg3200.txt +so--don't pg3200.txt +so--farewell! pg3200.txt +so--i pg3200.txt +so--just pg3200.txt +so--r-o-o-meo! pg3200.txt +so--you's pg3200.txt +so-and-so pg3200.txt +so-called pg3200.txt +so-ho! pg100.txt +so-on pg3200.txt +so-so-so-so-so-so-so." pg3200.txt +so-so." pg3200.txt +so. pg31100.txt, pg3200.txt, pg100.txt +so." pg31100.txt, pg3200.txt +so.". pg3200.txt +so.' pg3200.txt +so.-- pg31100.txt +so.... 
pg3200.txt +so: pg31100.txt, pg3200.txt, pg100.txt +so; pg31100.txt, pg3200.txt, pg100.txt +so;--a pg31100.txt +so? pg31100.txt, pg3200.txt, pg100.txt +so?" pg31100.txt, pg3200.txt +so?' pg3200.txt, pg100.txt +so?--" pg31100.txt +soak pg3200.txt +soak, pg3200.txt +soak. pg3200.txt +soaked pg3200.txt +soaked, pg3200.txt +soaking pg3200.txt +soap pg3200.txt +soap!" pg3200.txt +soap!--' pg3200.txt +soap, pg3200.txt +soap-boilers. pg3200.txt +soap-fat. pg3200.txt +soap-kettle pg3200.txt +soap-suds, pg3200.txt +soap. pg3200.txt +soap; pg3200.txt +soar pg3200.txt, pg100.txt +soar, pg3200.txt, pg100.txt +soar. pg100.txt +soared pg3200.txt +soaring pg3200.txt +soaring, pg3200.txt +soaring. pg3200.txt +soars! pg100.txt +sob pg31100.txt, pg3200.txt +sob, pg3200.txt +sob. pg3200.txt +sobbing pg3200.txt +sobbing, pg3200.txt +sobbing. pg3200.txt +sobbings, pg3200.txt +sober pg31100.txt, pg3200.txt +sober, pg3200.txt +sober-colored pg3200.txt +sober. pg100.txt +sober." pg3200.txt +sober; pg3200.txt, pg100.txt +sobered pg3200.txt +soberly pg3200.txt +soberly: pg3200.txt +sobriety pg3200.txt +sobriety. pg31100.txt, pg100.txt +sobs pg3200.txt, pg100.txt +sobs, pg31100.txt, pg3200.txt +sobs. pg31100.txt, pg3200.txt +sobs: pg3200.txt +sociabilities pg3200.txt +sociability pg3200.txt +sociable pg3200.txt, pg100.txt +sociable. pg3200.txt +sociable? pg100.txt +social pg31100.txt, pg3200.txt +socialist-hated pg3200.txt +socialists pg3200.txt +socialists!' pg3200.txt +socialists, pg3200.txt +socially pg3200.txt +societies pg3200.txt +societies, pg3200.txt +societies--nurseries pg3200.txt +societies; pg100.txt +society pg31100.txt, pg3200.txt, pg100.txt +society! pg31100.txt +society's pg3200.txt +society, pg31100.txt, pg3200.txt, pg100.txt +society- pg100.txt +society--the pg3200.txt +society. pg31100.txt, pg3200.txt, pg100.txt +society." pg31100.txt, pg3200.txt +society.' pg3200.txt +society; pg31100.txt, pg3200.txt, pg100.txt +society? pg100.txt +society?" pg31100.txt +sock pg3200.txt +sock," pg3200.txt +sock; pg3200.txt +socket, pg3200.txt +sockets. pg3200.txt +socks. pg3200.txt +sod pg3200.txt +sod," pg3200.txt +sodded pg3200.txt +sodden pg3200.txt +soe'er pg100.txt +soe'er, pg100.txt +soever--" pg3200.txt +sofa pg31100.txt, pg3200.txt +sofa--partly pg3200.txt +sofa. pg31100.txt, pg3200.txt +sofa." pg31100.txt +sofa; pg3200.txt +sofas pg3200.txt +sofas, pg3200.txt +sofas. pg3200.txt +soft pg3200.txt, pg100.txt +soft! pg100.txt +soft, pg3200.txt, pg100.txt +soft- pg3200.txt +soft-sandaled, pg3200.txt +soft-spoken pg3200.txt +soft. pg3200.txt +soft." pg3200.txt +soften pg31100.txt, pg3200.txt +softened pg31100.txt, pg3200.txt +softened, pg31100.txt +softeneth pg3200.txt +softening pg3200.txt +softening. pg3200.txt +softens pg3200.txt +softer, pg3200.txt +softer. pg3200.txt +softest pg3200.txt +softly pg3200.txt, pg100.txt +softly, pg3200.txt +softly-- pg3200.txt +softly. pg3200.txt, pg100.txt +softly: pg3200.txt +softly; pg100.txt +softness pg31100.txt, pg3200.txt, pg100.txt +softness, pg31100.txt +software, pg100.txt +soil pg3200.txt, pg100.txt +soil'd pg100.txt +soil, pg3200.txt, pg100.txt +soil. pg3200.txt, pg100.txt +soil." pg3200.txt +soil; pg3200.txt +soiled pg3200.txt +soiled, pg3200.txt +soiled." pg3200.txt +soilure, pg100.txt +soissons. pg3200.txt +sojourn pg3200.txt +sojourn'd, pg100.txt +sojourn, pg100.txt +sojourn. pg3200.txt +sojourned pg3200.txt +sol pg100.txt +sola! pg100.txt +sola, pg100.txt +solace pg3200.txt, pg100.txt +solace, pg3200.txt, pg100.txt +solace. 
pg3200.txt +solace; pg100.txt +solacing pg3200.txt +solanio pg100.txt +solar pg3200.txt +solche pg3200.txt +sold pg3200.txt, pg100.txt +sold!" pg3200.txt +sold, pg3200.txt, pg100.txt +sold----" pg3200.txt +sold--that pg3200.txt +sold. pg3200.txt, pg100.txt +sold.' pg100.txt +sold; pg100.txt +solde pg3200.txt +solder pg3200.txt +soldering-iron; pg3200.txt +soldier pg3200.txt, pg100.txt +soldier!" pg3200.txt +soldier!) pg3200.txt +soldier's pg3200.txt +soldier's, pg100.txt +soldier) pg3200.txt +soldier, pg3200.txt, pg100.txt +soldier- pg100.txt +soldier--" pg3200.txt +soldier--come!" pg3200.txt +soldier--come!' pg3200.txt +soldier--like pg3200.txt +soldier-cap, pg3200.txt +soldier-ship pg100.txt +soldier-talk pg3200.txt +soldier-ways, pg3200.txt +soldier. pg3200.txt, pg100.txt +soldier." pg31100.txt +soldier.' pg3200.txt, pg100.txt +soldier: pg100.txt +soldier; pg3200.txt, pg100.txt +soldier? pg100.txt +soldier?" pg3200.txt +soldiering pg3200.txt +soldierly pg3200.txt +soldierly. pg3200.txt +soldiers pg31100.txt, pg3200.txt, pg100.txt +soldiers! pg100.txt +soldiers' pg3200.txt +soldiers, pg31100.txt, pg3200.txt, pg100.txt +soldiers. pg3200.txt, pg100.txt +soldiers." pg3200.txt +soldiers; pg3200.txt, pg100.txt +soldiers? pg100.txt +soldiers] pg100.txt +soldiership pg100.txt +soldiership, pg100.txt +soldiership. pg100.txt +soldiery pg3200.txt +soldiery. pg3200.txt +soldiery: pg3200.txt +sole pg31100.txt, pg3200.txt, pg100.txt +sole. pg100.txt +solely pg31100.txt, pg3200.txt +solely, pg3200.txt +solely. pg100.txt +solemn pg31100.txt, pg3200.txt +solemn, pg31100.txt, pg3200.txt +solemn: pg3200.txt +solemn; pg3200.txt +solemnest pg3200.txt +solemnities pg3200.txt +solemnities, pg3200.txt +solemnities. pg3200.txt, pg100.txt +solemnity pg31100.txt, pg3200.txt +solemnity, pg3200.txt, pg100.txt +solemnity-- pg3200.txt +solemnity. pg31100.txt, pg3200.txt, pg100.txt +solemnity: pg3200.txt +solemnity? pg3200.txt, pg100.txt +solemniz'd. pg100.txt +solemniz'd; pg100.txt +solemnize pg100.txt +solemnized pg100.txt +solemnized, pg100.txt +solemnly pg3200.txt, pg100.txt +solemnly. pg31100.txt +solemnly: pg3200.txt +soles pg3200.txt +soles. pg3200.txt, pg100.txt +solicit pg3200.txt, pg100.txt +solicit. pg31100.txt +solicitation pg31100.txt +solicitation." pg31100.txt +solicitations, pg3200.txt +solicited pg31100.txt, pg100.txt +soliciting pg100.txt +solicitings, pg100.txt +solicitor pg3200.txt +solicitor. pg100.txt +solicitous pg31100.txt, pg3200.txt +solicits pg100.txt +solicitude pg31100.txt, pg3200.txt +solicitude, pg31100.txt +solicitude. pg31100.txt, pg3200.txt +solicitudes pg31100.txt, pg3200.txt +solid pg31100.txt, pg3200.txt +solid!" pg3200.txt +solid, pg3200.txt +solid; pg3200.txt +solider, pg3200.txt +solider; pg3200.txt +solidest pg3200.txt +solidified pg3200.txt +solidified, pg3200.txt +solidity pg31100.txt +solidity, pg3200.txt +solidly pg3200.txt +soliloquising pg3200.txt +soliloquized pg3200.txt +soliloquized: pg3200.txt +soliloquy pg3200.txt +soliloquy, pg3200.txt +soliloquy," pg3200.txt +soliloquy." pg31100.txt, pg3200.txt +solitarily pg31100.txt +solitary pg31100.txt, pg3200.txt +solitary." pg31100.txt +solitude pg31100.txt, pg3200.txt +solitude! pg3200.txt +solitude, pg31100.txt, pg3200.txt +solitude--into pg3200.txt +solitude. pg31100.txt, pg3200.txt +solitude." pg3200.txt +solitudes pg3200.txt +solitudes! pg3200.txt +solitudes, pg3200.txt +solitudes--out pg3200.txt +solitudes. 
pg3200.txt +solitudes; pg3200.txt +soll pg3200.txt +sollermun pg3200.txt +soloist, pg3200.txt +solomon pg3200.txt +solomon's pg3200.txt +solomon. pg3200.txt +solos pg3200.txt +solstices, pg3200.txt +solus pg100.txt +solus. pg100.txt +solution pg3200.txt +solution. pg3200.txt +solution: pg3200.txt +solved pg3200.txt +solyman, pg100.txt +som'ers." pg3200.txt +sombre, pg3200.txt +some, pg31100.txt, pg3200.txt, pg100.txt +some--" pg3200.txt +some. pg31100.txt, pg3200.txt, pg100.txt +some." pg31100.txt, pg3200.txt +some.- pg100.txt +some.--so pg31100.txt +some; pg3200.txt +some] pg100.txt +somebody pg31100.txt, pg3200.txt, pg100.txt +somebody!" pg3200.txt +somebody's pg3200.txt +somebody, pg31100.txt, pg3200.txt +somebody--got pg3200.txt +somebody--though pg31100.txt +somebody. pg31100.txt, pg3200.txt, pg100.txt +somebody." pg31100.txt, pg3200.txt +somebody? pg100.txt +someday, pg3200.txt +somehow pg31100.txt, pg3200.txt +somehow, pg3200.txt +somehow--though pg3200.txt +somehow. pg3200.txt +somehow." pg3200.txt +somehow; pg3200.txt +someone's pg3200.txt +somers pg3200.txt +somers--she pg3200.txt +somersaults pg3200.txt +somerset pg100.txt +somerset, pg31100.txt, pg100.txt +somerset," pg31100.txt +somerset. pg100.txt +somerset." pg3200.txt +somerset; pg100.txt +somerset? pg100.txt +somerset?" pg3200.txt +somerset?' pg100.txt +somersetshire, pg31100.txt +somersetshire?" pg31100.txt +somerville pg100.txt +somethin' pg3200.txt +something pg31100.txt, pg3200.txt, pg100.txt +something! pg31100.txt +something!" pg3200.txt +something" pg3200.txt +something's pg3200.txt +something), pg3200.txt +something, pg31100.txt, pg3200.txt +something," pg3200.txt +something--it pg3200.txt +something. pg31100.txt, pg3200.txt, pg100.txt +something." pg31100.txt, pg3200.txt +something.' pg3200.txt +something; pg3200.txt +something?" pg3200.txt +somethingness pg3200.txt +somethings." pg3200.txt +sometime pg3200.txt, pg100.txt +sometime, pg3200.txt +sometime. pg3200.txt +sometime." pg3200.txt +sometime.' pg3200.txt +sometimes pg31100.txt, pg3200.txt, pg100.txt +sometimes, pg31100.txt, pg3200.txt, pg100.txt +sometimes,) pg3200.txt +sometimes--" pg3200.txt +sometimes--even pg3200.txt +sometimes--improved pg3200.txt +sometimes--not pg3200.txt +sometimes. pg3200.txt +sometimes." pg31100.txt, pg3200.txt +sometimes: pg3200.txt, pg100.txt +sometimes; pg3200.txt +sometimes?" pg3200.txt +somewhat pg31100.txt, pg3200.txt +somewhat, pg3200.txt +somewhat. pg3200.txt, pg100.txt +somewhat.' pg3200.txt +somewhere pg3200.txt +somewhere, pg31100.txt, pg3200.txt +somewhere,) pg3200.txt +somewhere. pg3200.txt +somewhere." pg3200.txt +somewhere: pg3200.txt +somewhere; pg3200.txt +somewheres pg3200.txt +somewheres!" pg3200.txt +somewheres, pg3200.txt +somewheres. pg3200.txt +somme. pg100.txt +somnambulist. pg3200.txt +son pg31100.txt, pg3200.txt, pg100.txt +son! pg3200.txt, pg100.txt +son!" pg3200.txt +son!'? pg100.txt +son'- pg100.txt +son'; pg100.txt +son's pg100.txt +son's. pg100.txt +son, pg31100.txt, pg3200.txt, pg100.txt +son- pg100.txt +son-- pg3200.txt +son--and pg31100.txt +son--not pg3200.txt +son-in-law pg31100.txt +son-in-law's pg100.txt +son-in-law, pg31100.txt, pg100.txt +son-in-law. pg100.txt +son-in-law." pg31100.txt +son-in-law; pg100.txt +son. pg31100.txt, pg3200.txt, pg100.txt +son." pg31100.txt, pg3200.txt +son.- pg100.txt +son: pg3200.txt, pg100.txt +son; pg31100.txt, pg3200.txt, pg100.txt +son;--"but pg31100.txt +son;--and pg31100.txt +son? pg100.txt +son?" 
pg31100.txt, pg3200.txt +song pg31100.txt, pg3200.txt, pg100.txt +song, pg31100.txt, pg3200.txt, pg100.txt +song-- pg3200.txt +song-bird pg3200.txt +song. pg31100.txt, pg3200.txt, pg100.txt +song." pg31100.txt, pg3200.txt +song: pg3200.txt +song; pg3200.txt, pg100.txt +song? pg3200.txt, pg100.txt +songs pg31100.txt, pg3200.txt, pg100.txt +songs, pg3200.txt, pg100.txt +songs. pg3200.txt +songsters pg3200.txt +sonless pg3200.txt +sonne pg3200.txt +sonnet pg100.txt +sonnet, pg31100.txt +sonnet. pg100.txt +sonnet] pg100.txt +sonneting. pg100.txt +sonnets pg100.txt +sonny!" pg3200.txt +sonora. pg3200.txt +sonora: pg3200.txt +sons pg31100.txt, pg3200.txt, pg100.txt +sons! pg100.txt +sons, pg3200.txt, pg100.txt +sons- pg100.txt +sons-in-law, pg100.txt +sons. pg3200.txt, pg100.txt +sons: pg100.txt +sons; pg3200.txt, pg100.txt +sons? pg100.txt +sonship pg3200.txt +soon pg31100.txt, pg3200.txt, pg100.txt +soon!" pg31100.txt, pg3200.txt +soon, pg31100.txt, pg3200.txt, pg100.txt +soon," pg31100.txt +soon- pg100.txt +soon--a pg31100.txt +soon--oh, pg3200.txt +soon--plenty." pg3200.txt +soon--there pg3200.txt +soon. pg31100.txt, pg3200.txt, pg100.txt +soon." pg31100.txt, pg3200.txt +soon.- pg100.txt +soon; pg100.txt +soon? pg100.txt +soon?" pg31100.txt, pg3200.txt +sooner pg31100.txt, pg3200.txt, pg100.txt +sooner, pg3200.txt +sooner. pg31100.txt, pg3200.txt, pg100.txt +sooner." pg31100.txt, pg3200.txt +sooner.' pg3200.txt +sooner; pg31100.txt +soonest pg100.txt +sooth pg31100.txt +sooth! pg100.txt +sooth' pg100.txt +sooth, pg3200.txt, pg100.txt +sooth- pg100.txt +sooth. pg100.txt +sooth." pg3200.txt +sooth: pg100.txt +sooth? pg3200.txt +soothe pg31100.txt, pg3200.txt +soothe; pg31100.txt +soothed pg31100.txt, pg3200.txt +soothed, pg31100.txt +soothed. pg31100.txt +soothing pg31100.txt, pg3200.txt +soothing!--oh, pg31100.txt +soothing, pg31100.txt, pg3200.txt +soothing; pg31100.txt +soothings pg31100.txt +soothsay. pg100.txt +soothsayer pg100.txt +soothsayer! pg100.txt +soothsayer, pg100.txt +soothsayer. pg100.txt +sop pg100.txt +sophia pg31100.txt +sophia, pg31100.txt +sophia. pg31100.txt +sophister. pg100.txt +sophomoric pg3200.txt +sophy pg31100.txt +sophy," pg31100.txt +sophy. pg100.txt +sopra pg3200.txt +soprano pg3200.txt +sor." pg3200.txt +sorcerer, pg3200.txt +sorceress, pg100.txt +sorceress. pg3200.txt +sorceress; pg100.txt +sorcery, pg3200.txt +sorcery. pg100.txt +sordid pg3200.txt +sore pg31100.txt, pg3200.txt, pg100.txt +sore, pg3200.txt, pg100.txt +sore-faced pg3200.txt +sore-throat pg31100.txt +sore. pg3200.txt, pg100.txt +sore; pg3200.txt +sorel. pg100.txt +sorely pg3200.txt, pg100.txt +sores pg3200.txt, pg100.txt +sores, pg3200.txt +sores. pg3200.txt +sores; pg100.txt +sorrel pg3200.txt +sorrel, pg3200.txt +sorrier. pg3200.txt +sorrow pg31100.txt, pg3200.txt, pg100.txt +sorrow! pg100.txt +sorrow!' pg100.txt +sorrow" pg3200.txt +sorrow, pg31100.txt, pg3200.txt, pg100.txt +sorrow- pg100.txt +sorrow--he pg3200.txt +sorrow-sowing pg3200.txt +sorrow. pg3200.txt, pg100.txt +sorrow." pg3200.txt +sorrow.' pg3200.txt +sorrow; pg100.txt +sorrow? pg100.txt +sorrowed. pg100.txt +sorrowful pg31100.txt, pg3200.txt +sorrowful. pg31100.txt, pg3200.txt +sorrowful? pg3200.txt +sorrowfully pg31100.txt, pg3200.txt +sorrowfully, pg3200.txt +sorrowfully-- pg3200.txt +sorrowfully: pg3200.txt +sorrowing pg3200.txt +sorrowing--if pg31100.txt +sorrows pg31100.txt, pg3200.txt, pg100.txt +sorrows, pg31100.txt, pg3200.txt, pg100.txt +sorrows. pg3200.txt, pg100.txt +sorrows." pg3200.txt +sorrows? 
pg100.txt +sorry pg31100.txt, pg3200.txt, pg100.txt +sorry, pg3200.txt, pg100.txt +sorry--sorry--sorry. pg3200.txt +sorry-looking pg3200.txt +sorry. pg31100.txt, pg3200.txt, pg100.txt +sorry." pg3200.txt +sorry; pg31100.txt, pg3200.txt, pg100.txt +sorry? pg100.txt +sort pg31100.txt, pg3200.txt, pg100.txt +sort! pg100.txt +sort, pg31100.txt, pg3200.txt, pg100.txt +sort- pg100.txt +sort--let pg3200.txt +sort--like pg3200.txt +sort--one pg3200.txt +sort--which pg31100.txt +sort. pg31100.txt, pg3200.txt, pg100.txt +sort." pg31100.txt +sort: pg3200.txt, pg100.txt +sort; pg31100.txt, pg3200.txt, pg100.txt +sort?" pg31100.txt +sorter pg3200.txt +sortie pg3200.txt +sorties, pg3200.txt +sorting pg3200.txt +sorts pg3200.txt +sorts, pg100.txt +sorts.--delightful pg31100.txt +sossius, pg100.txt +sot pg3200.txt, pg100.txt +sot! pg100.txt +sot, pg3200.txt +sot--and pg3200.txt +sot? pg100.txt +sotherton pg31100.txt +sotherton, pg31100.txt +sotherton. pg31100.txt +sotherton." pg31100.txt +sots. pg100.txt +soud! pg100.txt +sough, pg3200.txt +sought pg31100.txt, pg3200.txt, pg100.txt +sought, pg100.txt +sought. pg31100.txt, pg3200.txt +sought; pg100.txt +soul pg31100.txt, pg3200.txt, pg100.txt +soul! pg31100.txt, pg3200.txt, pg100.txt +soul!" pg3200.txt +soul!' pg3200.txt +soul's pg3200.txt +soul, pg31100.txt, pg3200.txt, pg100.txt +soul,' pg100.txt +soul- pg100.txt +soul--" pg3200.txt +soul--had pg3200.txt +soul--let pg3200.txt +soul-vex'd, pg100.txt +soul. pg31100.txt, pg3200.txt, pg100.txt +soul." pg31100.txt, pg3200.txt +soul; pg31100.txt, pg100.txt +soul? pg3200.txt, pg100.txt +soul?" pg3200.txt +souls pg31100.txt, pg3200.txt, pg100.txt +souls! pg3200.txt, pg100.txt +souls!) pg100.txt +souls!- pg100.txt +souls, pg3200.txt, pg100.txt +souls- pg100.txt +souls. pg3200.txt, pg100.txt +souls." pg3200.txt +souls: pg100.txt +souls; pg3200.txt, pg100.txt +souls? pg3200.txt, pg100.txt +souls?" pg3200.txt +soun', pg3200.txt +sound pg31100.txt, pg3200.txt, pg100.txt +sound! pg3200.txt +sound'- pg100.txt +sound'? pg100.txt +sound, pg3200.txt, pg100.txt +sound--let pg3200.txt +sound-hearted--and pg3200.txt +sound. pg31100.txt, pg3200.txt, pg100.txt +sound." pg3200.txt +sound.] pg3200.txt +sound: pg100.txt +sound; pg3200.txt, pg100.txt +sound? pg3200.txt, pg100.txt +sound] pg100.txt +sounded pg3200.txt, pg100.txt +sounded, pg3200.txt, pg100.txt +sounded. pg100.txt +sounded; pg100.txt +sounded] pg100.txt +soundest pg3200.txt +soundest; pg3200.txt +sounding pg3200.txt +sounding-boat!' pg3200.txt +sounding-boat.' pg3200.txt +sounding-yawl pg3200.txt +sounding. pg3200.txt, pg100.txt +sounding?' pg3200.txt +soundings pg3200.txt +soundless pg3200.txt +soundly pg100.txt +soundly. pg3200.txt, pg100.txt +soundly; pg100.txt +soundly? pg100.txt +soundpost? pg100.txt +sounds pg31100.txt, pg3200.txt, pg100.txt +sounds! pg100.txt +sounds, pg3200.txt, pg100.txt +sounds- pg100.txt +sounds. pg3200.txt, pg100.txt +sounds." pg3200.txt +sounds] pg100.txt +soup pg3200.txt +soup, pg31100.txt, pg3200.txt +soup-plate, pg3200.txt +soup. pg3200.txt +soup." pg31100.txt +sour pg3200.txt +sour, pg3200.txt, pg100.txt +sour-mash, pg3200.txt +sour. pg100.txt +sour." pg3200.txt +source pg31100.txt, pg3200.txt +source, pg3200.txt +source. pg31100.txt, pg3200.txt +source." pg3200.txt +source.' pg3200.txt +source? pg3200.txt +sources pg31100.txt, pg3200.txt +sources. pg3200.txt +soured pg31100.txt +souring pg3200.txt +south pg31100.txt, pg3200.txt, pg100.txt +south! 
pg3200.txt +south"- pg3200.txt +south, pg3200.txt, pg100.txt +south-african pg3200.txt +south-east, pg3200.txt +south. pg3200.txt, pg100.txt +south." pg31100.txt, pg3200.txt +south; pg3200.txt +south?" pg3200.txt +southampton pg3200.txt +southampton. pg31100.txt, pg100.txt +southampton." pg31100.txt +southampton; pg100.txt +southerly pg100.txt +southern pg3200.txt +southern?" pg3200.txt +southerner pg3200.txt +southerner. pg3200.txt +southerners-- pg3200.txt +southerners?" pg3200.txt +southward pg3200.txt +southward, pg3200.txt +southward--" pg3200.txt +southward. pg3200.txt, pg100.txt +southwark pg3200.txt, pg100.txt +southwark?" pg3200.txt +southwell; pg100.txt +southwest pg3200.txt +southwest. pg3200.txt +southwestern, pg3200.txt +sov'reigns; pg100.txt +sovereign pg3200.txt, pg100.txt +sovereign! pg100.txt +sovereign's pg3200.txt, pg100.txt +sovereign, pg3200.txt, pg100.txt +sovereign. pg3200.txt, pg100.txt +sovereign.' pg100.txt +sovereign; pg100.txt +sovereign? pg100.txt +sovereignty pg3200.txt, pg100.txt +sovereignty!" pg3200.txt +sovereignty, pg100.txt +sovereignty- pg100.txt +sovereignty--" pg3200.txt +sovereignty. pg3200.txt, pg100.txt +sovereignty; pg100.txt +sovereignty? pg100.txt +sovereignvours pg100.txt +sovrani pg3200.txt +sow pg3200.txt, pg100.txt +sow!" pg3200.txt +sow'd; pg100.txt +sow. pg3200.txt +soyez pg3200.txt +sozodont, pg3200.txt +space pg31100.txt, pg3200.txt, pg100.txt +space!" pg3200.txt +space, pg3200.txt, pg100.txt +space--rose pg3200.txt +space-selecting pg3200.txt +space. pg3200.txt, pg100.txt +space.) pg3200.txt +space; pg3200.txt, pg100.txt +spaced pg3200.txt +spaces pg3200.txt +spacing pg3200.txt +spacing, pg3200.txt +spacious pg3200.txt +spacious, pg3200.txt, pg100.txt +spaciousness pg31100.txt +spade pg3200.txt, pg100.txt +spade! pg100.txt +spade, pg100.txt +spade. pg100.txt +spain pg3200.txt +spain, pg3200.txt, pg100.txt +spain. pg3200.txt +spain." pg3200.txt +spain; pg3200.txt, pg100.txt +spain? pg3200.txt, pg100.txt +spak'st pg100.txt +spak'st? pg100.txt +spake pg100.txt +spake, pg100.txt +spake. pg100.txt +spake." pg3200.txt +spake: pg100.txt +spake? pg100.txt +span pg3200.txt, pg100.txt +span. pg100.txt +span; pg100.txt +spangled pg3200.txt +spaniard pg3200.txt, pg100.txt +spaniard, pg3200.txt +spaniard. pg100.txt +spaniard." pg3200.txt +spaniards pg3200.txt +spaniel, pg100.txt +spaniel-fawning. pg100.txt +spanish pg3200.txt +spanish, pg3200.txt +spanish-pouch- pg100.txt +spanishesmen pg3200.txt +spanking pg3200.txt +spar'd pg100.txt +spar'd, pg100.txt +spar'd. pg100.txt +spare pg31100.txt, pg3200.txt +spare, pg31100.txt, pg3200.txt +spare. pg3200.txt, pg100.txt +spare." pg31100.txt +spared pg31100.txt, pg3200.txt +spared"... pg31100.txt +spared, pg31100.txt, pg3200.txt +spared--for pg3200.txt +spared. pg31100.txt, pg3200.txt +spared." pg31100.txt, pg3200.txt +spared; pg3200.txt +spares; pg100.txt +sparing pg3200.txt +sparingly. pg31100.txt +spark pg3200.txt, pg100.txt +spark--i pg3200.txt +spark. pg100.txt +sparkle pg3200.txt +sparkled pg3200.txt +sparklin'!' pg3200.txt +sparkling pg31100.txt, pg3200.txt +sparks pg3200.txt, pg100.txt +sparks. pg3200.txt +sparks; pg100.txt +sparred pg3200.txt +sparrow's pg3200.txt +sparrow, pg100.txt +sparrow. pg100.txt +sparrows, pg100.txt +spasm pg3200.txt +spasmodic pg3200.txt +spat pg3200.txt +spate. pg3200.txt +spattered pg3200.txt +spattered, pg3200.txt +spaulding pg3200.txt +spaulding, pg3200.txt +spavin pg100.txt +spawn-- pg3200.txt +speak pg31100.txt, pg3200.txt, pg100.txt +speak! 
pg3200.txt, pg100.txt +speak!" pg3200.txt +speak!- pg100.txt +speak!--whereas, pg3200.txt +speak'st pg100.txt +speak'st, pg100.txt +speak'st. pg100.txt +speak) pg3200.txt +speak), pg31100.txt +speak, pg31100.txt, pg3200.txt, pg100.txt +speak- pg100.txt +speak-- pg31100.txt, pg3200.txt +speak--and pg3200.txt +speak--he pg3200.txt +speak--not pg3200.txt +speak. pg31100.txt, pg3200.txt, pg100.txt +speak." pg31100.txt, pg3200.txt +speak: pg3200.txt, pg100.txt +speak; pg31100.txt, pg3200.txt, pg100.txt +speak;--sure pg31100.txt +speak? pg31100.txt, pg3200.txt, pg100.txt +speak?" pg3200.txt +speaker pg3200.txt +speaker!" pg3200.txt +speaker's pg3200.txt +speaker's, pg3200.txt +speaker, pg3200.txt +speaker. pg31100.txt +speaker." pg3200.txt +speaker; pg100.txt +speaker? pg100.txt +speakers pg3200.txt +speakers. pg3200.txt +speakin'--" pg3200.txt +speaking pg31100.txt, pg3200.txt, pg100.txt +speaking! pg100.txt +speaking, pg31100.txt, pg3200.txt +speaking--" pg31100.txt +speaking--"perhaps, pg31100.txt +speaking-tube pg3200.txt +speaking. pg31100.txt, pg3200.txt, pg100.txt +speaking." pg3200.txt +speaking; pg3200.txt +speaking? pg3200.txt +speaks pg31100.txt, pg3200.txt, pg100.txt +speaks! pg100.txt +speaks, pg100.txt +speaks- pg100.txt +speaks. pg100.txt +speaks: pg3200.txt +speaks? pg3200.txt, pg100.txt +spear pg3200.txt +spear! pg3200.txt +spear, pg3200.txt, pg100.txt +spear. pg3200.txt, pg100.txt +speargrass pg100.txt +spearheads; pg3200.txt +spears pg3200.txt +spears! pg3200.txt +spears, pg100.txt +spears. pg3200.txt +special pg31100.txt, pg3200.txt +special, pg100.txt +special-blest, pg100.txt +specialist--who pg3200.txt +specialists pg3200.txt +specialized pg3200.txt +specially pg3200.txt +specially, pg3200.txt +specially. pg100.txt +specialties. pg3200.txt +specialty pg3200.txt +specialty, pg3200.txt +specialty. pg3200.txt +specie. pg3200.txt +species pg3200.txt +species, pg3200.txt +species. pg3200.txt +species; pg3200.txt +specific pg3200.txt +specifications, pg3200.txt +specify pg100.txt +specify- pg100.txt +specimen pg3200.txt +specimen. pg3200.txt +specimens pg31100.txt, pg3200.txt +specimens--hawley, pg3200.txt +specimens: pg3200.txt +speck pg3200.txt +speck. pg3200.txt +speck." pg3200.txt +speckled pg3200.txt +specks pg31100.txt, pg3200.txt +spectacle pg3200.txt +spectacle! pg3200.txt, pg100.txt +spectacle!" pg3200.txt +spectacle, pg3200.txt +spectacle--but pg3200.txt +spectacle. pg3200.txt, pg100.txt +spectacle." pg3200.txt +spectacle; pg3200.txt +spectacle? pg100.txt +spectacles pg31100.txt, pg3200.txt, pg100.txt +spectacles--and pg3200.txt +spectacles. pg31100.txt, pg3200.txt, pg100.txt +spectacles? pg100.txt +spectacles] pg3200.txt +spectacular pg3200.txt +spectator pg3200.txt +spectator. pg3200.txt +spectators pg3200.txt +spectators, pg3200.txt, pg100.txt +spectators. pg3200.txt +spectators; pg3200.txt +specter pg3200.txt +specter? pg3200.txt +spectral pg3200.txt +spectre, pg3200.txt +spectre-knight pg3200.txt +speculate pg3200.txt +speculate: pg3200.txt +speculating pg3200.txt +speculation pg31100.txt, pg3200.txt +speculation, pg31100.txt +speculation- pg100.txt +speculation. pg31100.txt, pg3200.txt +speculation." pg3200.txt +speculations pg31100.txt, pg100.txt +speculations, pg31100.txt +speculations--i pg3200.txt +speculative pg3200.txt +speculator pg3200.txt +speculators pg3200.txt +speculators," pg3200.txt +speculators. pg3200.txt +speculators; pg3200.txt +sped pg3200.txt, pg100.txt +sped, pg3200.txt +sped. pg3200.txt, pg100.txt +sped." pg3200.txt +sped.' 
pg100.txt +speech pg31100.txt, pg3200.txt, pg100.txt +speech! pg100.txt +speech!" pg3200.txt +speech'- pg100.txt +speech, pg31100.txt, pg3200.txt, pg100.txt +speech-- pg3200.txt +speech--. pg31100.txt +speech--france pg3200.txt +speech--it pg3200.txt +speech-making pg3200.txt +speech. pg31100.txt, pg3200.txt, pg100.txt +speech." pg3200.txt +speech: pg3200.txt, pg100.txt +speech; pg3200.txt, pg100.txt +speech? pg3200.txt, pg100.txt +speech?" pg31100.txt +speeche pg3200.txt +speeche. pg3200.txt +speeches pg31100.txt, pg3200.txt, pg100.txt +speeches, pg3200.txt, pg100.txt +speeches- pg100.txt +speeches--then pg3200.txt +speeches. pg31100.txt, pg3200.txt +speeches." pg3200.txt +speeches; pg3200.txt +speeches? pg100.txt +speechless pg3200.txt +speechless. pg31100.txt, pg3200.txt, pg100.txt +speed pg31100.txt, pg3200.txt, pg100.txt +speed! pg3200.txt, pg100.txt +speed!" pg3200.txt +speed, pg3200.txt, pg100.txt +speed- pg100.txt +speed-- pg3200.txt +speed. pg3200.txt, pg100.txt +speed." pg3200.txt +speed: pg100.txt +speed; pg3200.txt, pg100.txt +speed? pg100.txt +speeded pg3200.txt +speedily pg31100.txt, pg3200.txt, pg100.txt +speedily, pg100.txt +speedily. pg3200.txt, pg100.txt +speedily." pg31100.txt +speedily; pg31100.txt +speeding pg3200.txt +speeds pg100.txt +speeds; pg100.txt +speedy pg31100.txt, pg100.txt +speedy, pg100.txt +speedy; pg31100.txt +speens, pg100.txt +spell pg31100.txt, pg3200.txt +spell, pg3200.txt +spell--" pg3200.txt +spell-bound. pg3200.txt +spell-stopp'd. pg100.txt +spell. pg3200.txt, pg100.txt +spell.) pg3200.txt +spell: pg3200.txt +spell; pg3200.txt, pg100.txt +spelled pg3200.txt +spelling pg3200.txt +spelling, pg3200.txt +spelling-book pg3200.txt +spelling." pg3200.txt +spells pg3200.txt +spells. pg3200.txt +spelt pg100.txt +spencer pg3200.txt +spend pg31100.txt, pg3200.txt, pg100.txt +spend, pg3200.txt, pg100.txt +spend. pg100.txt +spend; pg31100.txt, pg100.txt +spend? pg100.txt +spend?" pg3200.txt +spending pg31100.txt, pg3200.txt +spends pg3200.txt, pg100.txt +spendthrifts pg3200.txt +spent pg31100.txt, pg3200.txt, pg100.txt +spent! pg100.txt +spent, pg100.txt +spent. pg31100.txt, pg3200.txt, pg100.txt +spent: pg100.txt +spent; pg100.txt +spent? pg100.txt +sperit, pg3200.txt +sperling, pg3200.txt +spher'd pg100.txt +sphere pg31100.txt, pg3200.txt, pg100.txt +sphere, pg3200.txt, pg100.txt +sphere. pg31100.txt, pg100.txt +sphere; pg100.txt +sphere? pg3200.txt +spheres pg3200.txt, pg100.txt +spheres, pg100.txt +spheres. pg3200.txt, pg100.txt +sphinx!" pg3200.txt +sphynx pg3200.txt +sphynx. pg3200.txt +spices pg3200.txt, pg100.txt +spices, pg3200.txt +spick-and-span pg3200.txt +spicy pg3200.txt +spider pg3200.txt, pg100.txt +spider, pg3200.txt, pg100.txt +spider-like, pg100.txt +spider-strand. pg3200.txt +spider-web pg3200.txt +spider-webbed pg3200.txt +spider. pg3200.txt, pg100.txt +spiders pg3200.txt +spiders--tarantulas--and pg3200.txt +spiders." pg3200.txt +spidery, pg3200.txt +spied pg3200.txt, pg100.txt +spies pg3200.txt +spies, pg100.txt +spies. pg100.txt +spignel, pg3200.txt +spigot pg100.txt +spike pg3200.txt +spikes, pg3200.txt +spile pg3200.txt +spill pg3200.txt, pg100.txt +spill'd pg100.txt +spilled pg3200.txt +spills pg3200.txt +spilt pg3200.txt +spilt. pg100.txt +spilt; pg100.txt +spin pg3200.txt, pg100.txt +spin. pg3200.txt +spin.' pg100.txt +spin? pg3200.txt +spinal pg3200.txt +spindles pg3200.txt +spine pg3200.txt +spine! pg3200.txt +spine. 
pg3200.txt +spined pg3200.txt +spines pg3200.txt +spinner pg3200.txt +spinnet pg3200.txt +spinning-stick pg3200.txt +spiral-twist pg3200.txt +spirals, pg3200.txt +spire, pg3200.txt +spires pg3200.txt +spires--over pg3200.txt +spires--twenty-one pg3200.txt +spires. pg3200.txt +spirit pg31100.txt, pg3200.txt, pg100.txt +spirit! pg31100.txt, pg3200.txt, pg100.txt +spirit!" pg31100.txt +spirit's pg3200.txt +spirit, pg31100.txt, pg3200.txt, pg100.txt +spirit--" pg3200.txt +spirit--especially pg31100.txt +spirit-contenting pg3200.txt +spirit. pg31100.txt, pg3200.txt, pg100.txt +spirit." pg31100.txt, pg3200.txt +spirit: pg3200.txt +spirit; pg31100.txt, pg3200.txt, pg100.txt +spirit? pg100.txt +spirit?" pg31100.txt, pg3200.txt +spirited pg3200.txt +spirited, pg3200.txt +spiritless pg3200.txt +spiritless, pg100.txt +spiritless--" pg3200.txt +spirits pg31100.txt, pg3200.txt, pg100.txt +spirits! pg31100.txt, pg100.txt +spirits!- pg100.txt +spirits, pg31100.txt, pg3200.txt, pg100.txt +spirits," pg31100.txt +spirits. pg31100.txt, pg3200.txt, pg100.txt +spirits." pg31100.txt, pg100.txt +spirits... pg3200.txt +spirits: pg31100.txt, pg100.txt +spirits; pg31100.txt, pg3200.txt, pg100.txt +spirits? pg100.txt +spirits?" pg3200.txt +spiritual pg3200.txt +spiritual, pg3200.txt +spiritual. pg3200.txt +spiritual." pg3200.txt +spiritual.' pg3200.txt +spiritualism pg3200.txt +spiritualism, pg3200.txt +spiritualist, pg3200.txt +spiritualist. pg3200.txt +spiritualists pg3200.txt +spiritualists, pg3200.txt +spirituality pg3200.txt +spiritually pg3200.txt +spiritually, pg3200.txt +spiritually. pg3200.txt +spiritualty pg100.txt +spit pg3200.txt, pg100.txt +spit'n pg3200.txt +spit. pg100.txt +spit; pg100.txt +spital pg100.txt +spite pg31100.txt, pg3200.txt, pg100.txt +spite! pg100.txt +spite, pg3200.txt, pg100.txt +spite. pg31100.txt, pg100.txt +spite: pg31100.txt +spite; pg3200.txt +spiteful pg100.txt +spites! pg100.txt +spithead; pg31100.txt +spits pg3200.txt, pg100.txt +spits, pg3200.txt +spitting pg3200.txt +spitting, pg3200.txt +spittoon pg3200.txt +spitzbergen. pg3200.txt +spitzka pg3200.txt +spl-endid! pg3200.txt +splashed pg3200.txt +splashing pg3200.txt +splashing-board, pg31100.txt +spleen pg100.txt +spleen, pg100.txt +spleen. pg31100.txt, pg100.txt +spleens, pg100.txt +spleens- pg100.txt +splendid pg3200.txt +splendid! pg3200.txt +splendid!") pg3200.txt +splendid. pg3200.txt +splendidly pg3200.txt +splendidly. pg3200.txt +splendor pg31100.txt, pg3200.txt +splendor, pg3200.txt +splendor. pg3200.txt +splendor; pg3200.txt +splendors pg3200.txt +splendors!" pg3200.txt +splendors, pg3200.txt +splendors. pg3200.txt +splendour pg3200.txt +splendours pg3200.txt +splinter pg3200.txt +splinter; pg100.txt +splintered pg3200.txt +splinters pg3200.txt +split pg3200.txt, pg100.txt +split!] pg100.txt +split, pg100.txt +split- pg100.txt +split. pg100.txt +splits pg3200.txt +splits--139. pg3200.txt +splitting pg3200.txt +splotch pg3200.txt +splotchy pg3200.txt +spoil pg31100.txt, pg3200.txt, pg100.txt +spoil'd. pg100.txt +spoil, pg3200.txt, pg100.txt +spoil. pg3200.txt, pg100.txt +spoil? pg3200.txt, pg100.txt +spoiled pg3200.txt +spoiled, pg3200.txt +spoiled. pg3200.txt +spoils pg3200.txt, pg100.txt +spoils, pg3200.txt, pg100.txt +spoils; pg100.txt +spoilt pg31100.txt +spoilt!" pg31100.txt +spok'st pg100.txt +spoke pg31100.txt, pg3200.txt, pg100.txt +spoke! pg100.txt +spoke" pg3200.txt +spoke) pg31100.txt +spoke, pg31100.txt, pg3200.txt, pg100.txt +spoke-- pg31100.txt +spoke. pg31100.txt, pg3200.txt, pg100.txt +spoke.) 
pg31100.txt +spoke: pg3200.txt +spoke; pg3200.txt, pg100.txt +spoke? pg100.txt +spoken pg31100.txt, pg3200.txt, pg100.txt +spoken!' pg3200.txt +spoken, pg31100.txt, pg3200.txt, pg100.txt +spoken--i pg3200.txt +spoken--they pg3200.txt +spoken. pg31100.txt, pg3200.txt, pg100.txt +spoken." pg3200.txt +spoken: pg100.txt +spoken; pg3200.txt +spoken? pg100.txt +spokesman pg3200.txt +spokesman, pg31100.txt +spokesmen. pg3200.txt +sponge pg3200.txt +sponge--separated pg3200.txt +sponge. pg3200.txt, pg100.txt +sponge?" pg3200.txt +spongy. pg3200.txt +spontaneity pg3200.txt +spontaneous pg3200.txt +spontaneous, pg31100.txt, pg3200.txt +spontaneously pg31100.txt +spool pg3200.txt +spoon pg3200.txt, pg100.txt +spoon, pg3200.txt +spoon--wash pg3200.txt +spoon-basket pg3200.txt +spoon-meat, pg100.txt +spoon. pg31100.txt, pg100.txt +spoon; pg3200.txt +spoonful pg3200.txt +spoons pg3200.txt +spoons, pg3200.txt, pg100.txt +spoons. pg3200.txt, pg100.txt +sport pg31100.txt, pg3200.txt, pg100.txt +sport) pg100.txt +sport, pg31100.txt, pg3200.txt, pg100.txt +sport- pg100.txt +sport. pg100.txt +sport." pg3200.txt +sport; pg100.txt +sport? pg100.txt +sportful! pg100.txt +sporting pg31100.txt, pg3200.txt +sporting-place. pg100.txt +sports pg31100.txt, pg3200.txt +sports, pg3200.txt +sports. pg100.txt +sports; pg100.txt +sportsman, pg31100.txt +spos'n pg3200.txt +spot pg31100.txt, pg3200.txt, pg100.txt +spot"--$1,000 pg3200.txt +spot, pg31100.txt, pg3200.txt +spot--looked pg3200.txt +spot. pg31100.txt, pg3200.txt, pg100.txt +spot." pg3200.txt +spot; pg31100.txt, pg3200.txt +spot?--yes. pg31100.txt +spotless pg31100.txt, pg3200.txt, pg100.txt +spots pg3200.txt, pg100.txt +spots, pg3200.txt +spots. pg3200.txt, pg100.txt +spots; pg3200.txt +spotted pg31100.txt, pg3200.txt, pg100.txt +spotted. pg100.txt +spousal pg100.txt +spouse: pg100.txt +spout pg3200.txt, pg100.txt +spout." pg3200.txt +spouting pg3200.txt +spouting--is pg3200.txt +spouts, pg100.txt +spouts- pg100.txt +sprach pg3200.txt +sprache. pg3200.txt +sprained pg3200.txt +sprang pg3200.txt +sprang'st, pg100.txt +sprang. pg31100.txt +sprawl pg3200.txt +sprawl- pg100.txt +sprawling pg3200.txt +sprawling, pg3200.txt +spray pg3200.txt +spray, pg3200.txt +spray. pg3200.txt +sprays pg3200.txt, pg100.txt +sprays; pg100.txt +spread pg31100.txt, pg3200.txt, pg100.txt +spread, pg3200.txt, pg100.txt +spread. pg100.txt +spread; pg100.txt +spreading pg31100.txt, pg3200.txt +spreading. pg3200.txt +spreads pg3200.txt +spree! pg3200.txt +spree. pg3200.txt +sprig, pg3200.txt +sprigged, pg31100.txt +sprightly pg31100.txt +sprightly, pg100.txt +spring pg31100.txt, pg3200.txt, pg100.txt +spring! pg100.txt +spring, pg31100.txt, pg3200.txt, pg100.txt +spring- pg100.txt +spring--precisely pg31100.txt +spring-board pg3200.txt +spring-board: pg3200.txt +spring. pg31100.txt, pg3200.txt, pg100.txt +spring." pg31100.txt, pg3200.txt +spring: pg100.txt +spring; pg31100.txt +spring? pg100.txt +spring?" pg31100.txt, pg3200.txt +springing pg31100.txt, pg3200.txt +springing, pg31100.txt +springs pg3200.txt, pg100.txt +springs, pg3200.txt +springs--two pg3200.txt +springs...1,036 pg3200.txt +springs; pg100.txt +springy pg3200.txt +sprinkle pg3200.txt +sprinkled pg3200.txt +sprinkles, pg3200.txt +sprinkling pg3200.txt +sprite pg3200.txt, pg100.txt +sprite, pg100.txt +sprites pg100.txt +sprites, pg100.txt +sprites. pg100.txt +spruce-beer pg3200.txt +sprung pg3200.txt, pg100.txt +sprung, pg31100.txt, pg3200.txt +sprung. pg3200.txt +sprung; pg31100.txt +spry." 
pg3200.txt +spryer, pg3200.txt +spume pg3200.txt +spun pg3200.txt +spun. pg100.txt +spunk-water." pg3200.txt +spur pg3200.txt, pg100.txt +spur, pg100.txt +spur. pg3200.txt +spur; pg3200.txt, pg100.txt +spur? pg100.txt +spurious. pg3200.txt +spurn pg3200.txt, pg100.txt +spurn. pg100.txt +spurned pg31100.txt, pg3200.txt +spurned. pg3200.txt +spurns pg100.txt +spurring pg3200.txt +spurs pg3200.txt, pg100.txt +spurs, pg100.txt +spurting pg3200.txt +spurts pg3200.txt +sputter pg3200.txt +spy pg3200.txt, pg100.txt +spy, pg100.txt +spy-glass, pg3200.txt +spy-glass. pg3200.txt +spy. pg100.txt +spying pg3200.txt +squad pg3200.txt +squad?" pg3200.txt +squadron pg3200.txt +squads pg3200.txt +squall, pg3200.txt +squally pg3200.txt +squally, pg3200.txt +squalor pg3200.txt +squander pg3200.txt +squandered pg3200.txt +squandering pg3200.txt +squanderings pg3200.txt +square pg31100.txt, pg3200.txt +square, pg31100.txt, pg3200.txt +square--and pg3200.txt +square--where pg3200.txt +square. pg31100.txt, pg3200.txt, pg100.txt +square." pg3200.txt +square; pg3200.txt, pg100.txt +squared pg3200.txt +squared. pg3200.txt +squarely pg3200.txt +squarely, pg3200.txt +squarely." pg3200.txt +squarer pg100.txt +squares pg3200.txt +squares--" pg3200.txt +squat pg3200.txt +squatter pg3200.txt +squatter, pg3200.txt +squatters pg3200.txt +squatters. pg3200.txt +squatting, pg3200.txt +squaw pg3200.txt +squeaking pg3200.txt +squealing pg3200.txt +squealing. pg3200.txt +squeeze pg3200.txt +squeeze, pg3200.txt +squeezed pg3200.txt +squelch pg3200.txt +squelched pg3200.txt +squier, pg100.txt +squier. pg100.txt +squire pg3200.txt, pg100.txt +squire, pg100.txt +squire- pg100.txt +squire. pg100.txt +squires, pg100.txt +squires." pg3200.txt +squires; pg100.txt +squirm! pg3200.txt +squirm." pg3200.txt +squirmed pg3200.txt +squirming, pg3200.txt +squirrel pg3200.txt +squirrel's pg3200.txt +squirrel; pg3200.txt +squirt pg3200.txt +squirt-guns pg3200.txt +sr., pg3200.txt +st. pg31100.txt, pg3200.txt +staachfield, pg3200.txt +stab pg3200.txt +stab! pg3200.txt +stab, pg100.txt +stab. pg3200.txt, pg100.txt +stabb'd pg100.txt +stabb'd; pg100.txt +stabbed pg3200.txt +stabbing pg3200.txt, pg100.txt +stabbing. pg100.txt +stabboard, pg3200.txt +stabboard. pg3200.txt +stability pg31100.txt +stability, pg31100.txt +stable pg31100.txt, pg3200.txt, pg100.txt +stable, pg3200.txt +stable-room pg3200.txt +stable-yard pg31100.txt +stable. pg3200.txt, pg100.txt +stable." pg31100.txt +stableness, pg100.txt +stables pg31100.txt, pg100.txt +stables, pg3200.txt +stables--and pg3200.txt +stables. pg31100.txt, pg3200.txt +stabs pg100.txt +stabs] pg100.txt +staccato pg3200.txt +stacked pg3200.txt +stacks pg3200.txt +stadtverordnetenversammlungen. pg3200.txt +staff pg3200.txt, pg100.txt +staff, pg3200.txt, pg100.txt +staff. pg31100.txt, pg3200.txt, pg100.txt +staff? pg100.txt +stafford pg100.txt +stafford. pg3200.txt +staffordshire, pg31100.txt +stag, pg100.txt +stag. pg3200.txt, pg100.txt +stage pg31100.txt, pg3200.txt, pg100.txt +stage, pg31100.txt, pg3200.txt, pg100.txt +stage-- pg3200.txt +stage--by-play pg3200.txt +stage-coach pg3200.txt +stage-coach, pg31100.txt +stage-driver pg3200.txt +stage-drivers pg3200.txt +stage-fright pg3200.txt +stage-horses pg3200.txt +stage-manager, pg3200.txt +stage-office pg3200.txt +stage-properties, pg3200.txt +stage-scene pg3200.txt +stage. pg31100.txt, pg3200.txt, pg100.txt +stage." pg31100.txt, pg3200.txt +stage.' pg3200.txt +stage; pg100.txt +stage?' pg3200.txt +stage] pg100.txt +stagecoach pg3200.txt +stagecoach. 
pg3200.txt +stagecoaching. pg3200.txt +staged. pg3200.txt +stagers, pg3200.txt +stages pg3200.txt +stages, pg3200.txt +stages--well, pg3200.txt +stagger pg100.txt +staggered pg31100.txt, pg3200.txt +staggering pg31100.txt, pg3200.txt +staggers pg3200.txt +staggers. pg3200.txt +stagnant pg3200.txt +stags, pg100.txt +staid pg31100.txt, pg3200.txt +staid, pg3200.txt +staid. pg31100.txt +staid." pg31100.txt +staid? pg31100.txt +stain pg3200.txt, pg100.txt +stain'd pg100.txt +stain'd, pg100.txt +stain'd. pg100.txt +stain'd? pg100.txt +stain, pg3200.txt, pg100.txt +stain. pg100.txt +stained pg3200.txt +stained, pg100.txt +staines pg31100.txt +staines. pg100.txt +staineth. pg100.txt +stainless pg3200.txt +stains pg3200.txt, pg100.txt +stains, pg100.txt +stair, pg100.txt +stair-rods, pg3200.txt +stair. pg3200.txt +staircase pg31100.txt +staircase, pg31100.txt, pg3200.txt +staircase. pg31100.txt, pg3200.txt +staircase; pg3200.txt +staircase?" pg31100.txt +stairs pg31100.txt, pg3200.txt, pg100.txt +stairs, pg31100.txt, pg3200.txt +stairs--susy pg3200.txt +stairs. pg31100.txt, pg3200.txt, pg100.txt +stairs." pg31100.txt, pg3200.txt +stairs; pg31100.txt, pg3200.txt +stairs? pg100.txt +stairway pg3200.txt +stairway. pg3200.txt +stairways pg3200.txt +stairways; pg3200.txt +stake pg31100.txt, pg3200.txt +stake!" pg3200.txt +stake, pg3200.txt, pg100.txt +stake--your pg31100.txt +stake. pg31100.txt, pg3200.txt, pg100.txt +stake." pg3200.txt +stake; pg3200.txt, pg100.txt +stake?" pg31100.txt +staked pg3200.txt +stakes pg3200.txt +stakes--with pg3200.txt +stalactites pg3200.txt +stale pg100.txt +stale, pg100.txt +stale- pg100.txt +stale. pg100.txt +stalk, pg100.txt +stalked pg3200.txt +stall, pg100.txt +stalls, pg100.txt +stalwart pg3200.txt +stamboul pg3200.txt +stamboul. pg3200.txt +stamford, pg3200.txt +stammer, pg3200.txt +stammered pg3200.txt +stammeringly: pg3200.txt +stamp pg3200.txt, pg100.txt +stamp'd pg100.txt +stamp, pg100.txt +stamp. pg3200.txt, pg100.txt +stamp." pg31100.txt +stamp; pg100.txt +stamped pg31100.txt, pg3200.txt +stampede, pg3200.txt +stampede. pg3200.txt +stamps pg3200.txt +stamps. pg3200.txt +stan' pg3200.txt +stan'." pg3200.txt +stance, pg3200.txt +stand pg31100.txt, pg3200.txt, pg100.txt +stand! pg100.txt +stand!' pg100.txt +stand'st pg100.txt +stand'st, pg100.txt +stand'st; pg100.txt +stand) pg3200.txt +stand, pg3200.txt, pg100.txt +stand- pg100.txt +stand-- pg3200.txt +stand--st. pg3200.txt +stand. pg31100.txt, pg3200.txt, pg100.txt +stand." pg3200.txt +stand.' pg100.txt +stand.) pg3200.txt +stand; pg3200.txt, pg100.txt +stand? pg31100.txt, pg3200.txt, pg100.txt +stand?" pg3200.txt +standard pg31100.txt, pg3200.txt +standard") pg3200.txt +standard, pg3200.txt +standard-bearer, pg3200.txt +standard-bearer." pg3200.txt +standard-oil pg3200.txt +standard. pg100.txt +standard?" pg3200.txt +standard] pg3200.txt +standards pg3200.txt +standards. pg3200.txt +standby. pg3200.txt +standers, pg100.txt +standers-by pg100.txt +standeth pg3200.txt, pg100.txt +standing pg31100.txt, pg3200.txt, pg100.txt +standing! pg3200.txt +standing, pg3200.txt, pg100.txt +standing. pg3200.txt +standing; pg3200.txt +standing?" pg31100.txt +standpoint pg3200.txt +standpoint--for pg3200.txt +standpoint. pg3200.txt +standpoint." pg3200.txt +stands pg3200.txt, pg100.txt +stands, pg3200.txt, pg100.txt +stands. pg3200.txt, pg100.txt +stands: pg100.txt +stands; pg3200.txt, pg100.txt +stands? pg3200.txt +stands?" pg31100.txt +standstill pg3200.txt +standstill, pg3200.txt +standstill. pg3200.txt +stane." 
pg3200.txt +stanislaus, pg3200.txt +stanley pg3200.txt, pg100.txt +stanley, pg100.txt +stanley. pg3200.txt, pg100.txt +stanley? pg100.txt +stanly's pg31100.txt +stanton pg3200.txt +stanza, pg3200.txt +stanza. pg3200.txt +stanza.] pg3200.txt +stanza; pg31100.txt +stanzas pg3200.txt +stanzas--but pg3200.txt +stanzos? pg100.txt +staple pg3200.txt +staples pg100.txt +star pg3200.txt, pg100.txt +star! pg100.txt +star, pg100.txt +star-dust pg3200.txt +star-gazing." pg31100.txt +star-spangled pg3200.txt +star. pg3200.txt, pg100.txt +star; pg100.txt +starboard pg3200.txt +starboard! pg3200.txt +starboard," pg3200.txt +starchy pg3200.txt +starchy, pg3200.txt +starchy. pg3200.txt +stare pg3200.txt +stare! pg3200.txt +stare, pg3200.txt +stare--it pg3200.txt +stare? pg100.txt +stared pg31100.txt, pg3200.txt +stared, pg3200.txt +stared--many pg31100.txt +stares? pg100.txt +staring pg3200.txt +staring!--staring pg3200.txt +stark pg3200.txt +starlight, pg3200.txt +starlight. pg3200.txt +starr pg3200.txt +starring." pg3200.txt +stars pg3200.txt, pg100.txt +stars! pg100.txt +stars, pg100.txt +stars. pg3200.txt, pg100.txt +stars." pg3200.txt +stars.' pg3200.txt +stars; pg100.txt +start pg31100.txt, pg3200.txt, pg100.txt +start!" pg3200.txt +start"; pg3200.txt +start, pg31100.txt, pg3200.txt, pg100.txt +start--same pg3200.txt +start. pg3200.txt +start." pg3200.txt +start: pg3200.txt +start; pg3200.txt +start] pg100.txt +started pg31100.txt, pg3200.txt, pg100.txt +started!" pg3200.txt +started, pg3200.txt, pg100.txt +started-- pg3200.txt +started--for pg3200.txt +started. pg31100.txt, pg3200.txt +started; pg3200.txt +starting pg3200.txt +starting, pg3200.txt +starting--he pg3200.txt +starting-date pg3200.txt +starting-point, pg3200.txt +starting. pg100.txt +starting." pg3200.txt +startle pg100.txt +startled pg31100.txt, pg3200.txt +startled, pg31100.txt, pg3200.txt +startling pg3200.txt +startlingly pg3200.txt +starts pg3200.txt, pg100.txt +starts, pg3200.txt, pg100.txt +starts; pg3200.txt +starts] pg100.txt +starvation pg3200.txt +starvation! pg3200.txt +starvation, pg3200.txt +starvation--i pg3200.txt +starvation. pg3200.txt +starvation.' pg3200.txt +starve pg3200.txt, pg100.txt +starve! pg100.txt +starve, pg3200.txt, pg100.txt +starve. pg3200.txt, pg100.txt +starve; pg3200.txt +starved pg3200.txt +starved, pg31100.txt, pg3200.txt +starved. pg3200.txt, pg100.txt +starved." pg31100.txt +starveling pg3200.txt, pg100.txt +starving pg3200.txt, pg100.txt +starving, pg3200.txt +starving--anything pg3200.txt +starving--francois pg3200.txt +starving. pg3200.txt +state pg31100.txt, pg3200.txt, pg100.txt +state! pg100.txt +state!" pg31100.txt, pg3200.txt +state's pg3200.txt +state), pg100.txt +state, pg31100.txt, pg3200.txt, pg100.txt +state- pg100.txt +state--" pg3200.txt +state--a pg3200.txt +state--and pg3200.txt +state-room pg3200.txt +state. pg31100.txt, pg3200.txt, pg100.txt +state." pg31100.txt, pg3200.txt +state: pg31100.txt, pg3200.txt, pg100.txt +state; pg31100.txt, pg3200.txt, pg100.txt +state? pg3200.txt, pg100.txt +state] pg100.txt +stated pg3200.txt +stated, pg31100.txt, pg3200.txt +stated. pg3200.txt +stateliest pg3200.txt +stately pg3200.txt +stately, pg3200.txt +statement pg31100.txt, pg3200.txt +statement, pg3200.txt +statement--but pg31100.txt +statement--the pg3200.txt +statement. pg3200.txt, pg100.txt +statement: pg3200.txt +statement; pg3200.txt +statements pg3200.txt +statements. pg3200.txt +stateroom pg3200.txt +states pg31100.txt, pg3200.txt, pg100.txt +states!" 
pg3200.txt +states, pg3200.txt, pg100.txt +states--flint-picker pg3200.txt +states-senator pg3200.txt +states. pg31100.txt, pg3200.txt, pg100.txt +states." pg3200.txt +states.' pg3200.txt +states.'" pg3200.txt +states: pg3200.txt +states; pg3200.txt +states? pg100.txt +states?" pg3200.txt +states] pg3200.txt +statesman, pg3200.txt +statesman-like? pg3200.txt +statesmanship, pg3200.txt +statesmanship; pg3200.txt +statesmen pg3200.txt +statesmen, pg3200.txt +stating pg31100.txt, pg3200.txt +station pg31100.txt, pg3200.txt, pg100.txt +station, pg31100.txt, pg3200.txt +station--ten pg3200.txt +station-house pg3200.txt +station-keeper pg3200.txt +station-keepers pg3200.txt +station-master.' pg3200.txt +station. pg3200.txt +station." pg3200.txt +station; pg3200.txt +station?' pg3200.txt +stationary, pg3200.txt +stationary?" pg3200.txt +stationed pg3200.txt +stationery, pg3200.txt +stations pg31100.txt, pg3200.txt +stations, pg3200.txt +stations. pg3200.txt +stations; pg3200.txt +statistic. pg3200.txt +statistic: pg3200.txt +statistics pg3200.txt +statistics) pg3200.txt +statistics, pg3200.txt +statistics--and pg3200.txt +statistics. pg3200.txt +statistics; pg3200.txt +statistics?" pg3200.txt +statuary pg3200.txt +statuary, pg3200.txt +statue pg3200.txt +statue, pg3200.txt, pg100.txt +statue. pg3200.txt +statue." pg3200.txt +statue.) pg3200.txt +statue] pg100.txt +statues pg3200.txt +statues, pg3200.txt +statues. pg3200.txt +statuette pg3200.txt +statuettes, pg3200.txt +stature, pg3200.txt +stature; pg3200.txt +statute pg3200.txt, pg100.txt +statute-caps. pg100.txt +statute. pg3200.txt +statutes pg3200.txt, pg100.txt +staunch pg3200.txt +stavely pg3200.txt +staves! pg100.txt +staves, pg100.txt +staving pg3200.txt +stawell pg3200.txt +stay pg31100.txt, pg3200.txt, pg100.txt +stay! pg3200.txt, pg100.txt +stay" pg31100.txt +stay'd pg100.txt +stay'd! pg100.txt +stay'd, pg100.txt +stay'd. pg100.txt +stay, pg31100.txt, pg3200.txt, pg100.txt +stay- pg100.txt +stay--but pg31100.txt +stay. pg31100.txt, pg3200.txt, pg100.txt +stay." pg31100.txt, pg3200.txt +stay.' pg3200.txt +stay._"] pg31100.txt +stay: pg3200.txt, pg100.txt +stay; pg31100.txt, pg3200.txt, pg100.txt +stay? pg100.txt +stay?" pg31100.txt, pg3200.txt +stay] pg100.txt +stayed pg31100.txt, pg3200.txt +stayed, pg3200.txt +stayed--which pg3200.txt +stayed. pg31100.txt, pg3200.txt +stayed; pg3200.txt +staying pg31100.txt, pg3200.txt, pg100.txt +staying, pg3200.txt, pg100.txt +staying-power, pg3200.txt +staying. pg31100.txt, pg3200.txt +staymaker pg31100.txt +stays pg3200.txt, pg100.txt +stays. pg100.txt +stays." pg3200.txt +stays.' pg3200.txt +stays; pg100.txt +stead pg3200.txt, pg100.txt +stead, pg100.txt +stead-y-y-y!" pg3200.txt +stead. pg100.txt +stead: pg100.txt +steadfast pg3200.txt +steadfast, pg3200.txt +steadfastness pg3200.txt +steadied pg3200.txt +steadier pg31100.txt +steadiest pg31100.txt +steadily pg31100.txt, pg3200.txt +steadily, pg31100.txt, pg3200.txt +steadily. pg3200.txt +steadiness pg31100.txt +steads pg100.txt +steady pg31100.txt, pg3200.txt +steady, pg31100.txt +steady-going pg3200.txt +steady-y-y-y!" pg3200.txt +steady. pg31100.txt +steak pg3200.txt +steak, pg3200.txt +steak. pg3200.txt +steak?" pg3200.txt +steal pg3200.txt, pg100.txt +steal! pg3200.txt +steal'? pg100.txt +steal, pg3200.txt, pg100.txt +steal. pg100.txt +steal." pg3200.txt +steal: pg100.txt +steal; pg100.txt +steal?' pg3200.txt +stealer. pg100.txt +stealer.' pg3200.txt +stealers! pg100.txt +stealing pg3200.txt +stealing. pg3200.txt +stealing." 
pg3200.txt +stealing?" pg3200.txt +steals pg100.txt +stealth pg100.txt +stealth, pg100.txt +stealth. pg100.txt +stealth; pg100.txt +stealthily pg3200.txt +stealthy pg3200.txt +steam pg3200.txt +steam, pg3200.txt +steam- pg3200.txt +steam-boat pg3200.txt +steam-boat's pg3200.txt +steam-boat, pg3200.txt +steam-engine? pg3200.txt +steam-plow pg3200.txt +steam.' pg3200.txt +steamboat pg3200.txt +steamboat, pg3200.txt +steamboat. pg3200.txt +steamboat." pg3200.txt +steamboatful pg3200.txt +steamboating pg3200.txt +steamboating, pg3200.txt +steamboatmen pg3200.txt +steamboatmen, pg3200.txt +steamboatmen. pg3200.txt +steamboats pg3200.txt +steamboats! pg3200.txt +steamboats, pg3200.txt +steamboats--those pg3200.txt +steamboats. pg3200.txt +steamed pg3200.txt +steamed. pg3200.txt +steamer pg3200.txt +steamer, pg3200.txt +steamer-loads pg3200.txt +steamer. pg3200.txt +steamer." pg3200.txt +steamers pg3200.txt +steamers) pg3200.txt +steamers--all pg3200.txt +steamers. pg3200.txt +steamers; pg3200.txt +steamship pg3200.txt +steamships. pg3200.txt +stebbings, pg3200.txt +steed pg100.txt +steed, pg100.txt +steed. pg3200.txt, pg100.txt +steed." pg3200.txt +steed; pg100.txt +steeds pg100.txt +steeds, pg3200.txt, pg100.txt +steel pg3200.txt, pg100.txt +steel!'" pg3200.txt +steel'd pg100.txt +steel, pg3200.txt, pg100.txt +steel- pg100.txt +steel-plates, pg3200.txt +steel. pg3200.txt, pg100.txt +steel; pg3200.txt, pg100.txt +steel? pg100.txt +steele pg31100.txt +steele's pg31100.txt +steele. pg31100.txt +steele." pg31100.txt +steeled; pg100.txt +steeles, pg31100.txt +steeles. pg31100.txt +steep pg31100.txt, pg3200.txt, pg100.txt +steep'd pg100.txt +steep, pg31100.txt +steep. pg100.txt +steep; pg100.txt +steeped pg3200.txt +steeper pg31100.txt +steepest, pg3200.txt +steeple! pg100.txt +steeple.' pg3200.txt +steepled pg3200.txt +steeplelike pg3200.txt +steeples, pg3200.txt +steeples. pg3200.txt +steepness pg3200.txt +steeps, pg3200.txt +steeps. pg3200.txt +steer pg3200.txt, pg100.txt +steer, pg3200.txt +steered pg3200.txt +steering pg3200.txt +steering-oar, pg3200.txt +steering-oar. pg3200.txt +steering. pg3200.txt +steering; pg3200.txt +steersman pg3200.txt +steersman. pg3200.txt +steersmen pg3200.txt +stelled, pg100.txt +stem pg3200.txt +stems pg3200.txt +stems, pg3200.txt +stench pg3200.txt, pg100.txt +stench, pg3200.txt +stench." pg3200.txt +stencil pg3200.txt +stenographer, pg3200.txt +stenographer. pg3200.txt +step pg31100.txt, pg3200.txt, pg100.txt +step, pg31100.txt, pg3200.txt, pg100.txt +step. pg31100.txt, pg3200.txt +step." pg3200.txt +stepchild, pg3200.txt +stepfather pg3200.txt +stephano pg100.txt +stephano! pg100.txt +stephano, pg100.txt +stephanotis pg3200.txt +stephen pg3200.txt +stephen's pg3200.txt +stephen, pg3200.txt +stephenson, pg3200.txt +stephenson. pg3200.txt +stephenson." pg3200.txt +stepmothers, pg100.txt +stepp'd pg100.txt +stepped pg3200.txt +stepping pg31100.txt, pg3200.txt +stepping-stone pg3200.txt +steps pg3200.txt, pg100.txt +steps! pg100.txt +steps, pg31100.txt, pg3200.txt +steps. pg3200.txt, pg100.txt +steps." pg3200.txt +steps; pg3200.txt, pg100.txt +steps? pg100.txt +stepsister, pg3200.txt +stept pg31100.txt +sterile pg3200.txt +sterile, pg100.txt +sterility; pg100.txt +sterling pg31100.txt, pg3200.txt +sterling, pg3200.txt +sterling. pg3200.txt +sterling:--adeiu pg31100.txt +stern pg31100.txt, pg3200.txt, pg100.txt +stern-countenanced pg3200.txt +stern-line. pg3200.txt +stern-post; pg3200.txt +stern-wheel pg3200.txt +stern-wheeler pg3200.txt +stern. 
pg3200.txt, pg100.txt +stern: pg3200.txt +stern; pg100.txt +sterne, pg31100.txt +sterner pg3200.txt, pg100.txt +sternly pg3200.txt +sterns pg3200.txt +stevedores pg3200.txt +stevens pg3200.txt +stevens. pg3200.txt +stew-ponds, pg31100.txt +stew. pg3200.txt +steward pg3200.txt, pg100.txt +steward! pg100.txt +steward's. pg3200.txt +steward. pg100.txt +steward; pg31100.txt +stewardship, pg100.txt +stewardship; pg100.txt +stewart pg3200.txt +stewart, pg3200.txt +stewed pg3200.txt +stewed." pg3200.txt +stews, pg100.txt +stey pg3200.txt +stick pg3200.txt, pg100.txt +stick'st pg100.txt +stick, pg3200.txt +stick--driving pg3200.txt +stick. pg3200.txt, pg100.txt +stick; pg3200.txt +sticking pg3200.txt +sticking-place pg100.txt +sticking; pg100.txt +sticks pg3200.txt +sticks, pg3200.txt +sticky pg3200.txt +sties pg3200.txt +stiff pg3200.txt, pg100.txt +stiff, pg3200.txt +stiff-brimmed pg3200.txt +stiff-standing pg3200.txt +stiff; pg100.txt +stiffen pg3200.txt +stiffening. pg3200.txt +stiffness pg3200.txt +stifled pg3200.txt, pg100.txt +stifled. pg3200.txt +stiggers, pg3200.txt +stigmatic, pg100.txt +stile pg3200.txt +stile-a; pg100.txt +stiles; pg31100.txt +stilettos, pg3200.txt +still pg31100.txt, pg3200.txt, pg100.txt +still! pg3200.txt, pg100.txt +still, pg31100.txt, pg3200.txt, pg100.txt +still--" pg3200.txt +still--' pg3200.txt +still--calling pg3200.txt +still--in pg3200.txt +still-born, pg3200.txt +still-hunt pg3200.txt +still-hunt." pg3200.txt +still. pg3200.txt, pg100.txt +still." pg31100.txt, pg3200.txt +still.' pg3200.txt, pg100.txt +still: pg3200.txt, pg100.txt +still; pg3200.txt, pg100.txt +still;-- pg3200.txt +still? pg100.txt +still?" pg31100.txt, pg3200.txt +stilled, pg3200.txt +stilled. pg3200.txt +stillman pg3200.txt +stillman!" pg3200.txt +stillman, pg3200.txt +stillness pg31100.txt, pg3200.txt +stillness, pg3200.txt +stillness. pg3200.txt +stillness: pg3200.txt +stilts, pg3200.txt +stilts. pg3200.txt +stimulate pg3200.txt +stimulated pg3200.txt +stimulus pg3200.txt +sting pg3200.txt, pg100.txt +sting, pg3200.txt, pg100.txt +sting. pg3200.txt, pg100.txt +sting; pg100.txt +sting? pg100.txt +stinging pg100.txt +stingless pg3200.txt +stings pg3200.txt +stings! pg100.txt +stings, pg100.txt +stings. pg100.txt +stingy pg3200.txt +stingy. pg3200.txt +stingy." pg3200.txt +stink pg3200.txt +stink. pg3200.txt, pg100.txt +stint pg3200.txt, pg100.txt +stipulate pg31100.txt +stipulated pg3200.txt +stipulation pg3200.txt +stipulation, pg3200.txt +stipulation. pg3200.txt +stipulation; pg31100.txt +stipulations pg3200.txt +stir pg31100.txt, pg3200.txt, pg100.txt +stir, pg3200.txt, pg100.txt +stir. pg31100.txt, pg3200.txt, pg100.txt +stir." pg3200.txt +stirnn' pg3200.txt +stirr'd pg100.txt +stirr'd; pg100.txt +stirred pg31100.txt, pg3200.txt +stirred, pg3200.txt +stirred. pg3200.txt +stirred; pg3200.txt +stirrers, pg100.txt +stirring pg31100.txt, pg3200.txt, pg100.txt +stirring, pg3200.txt +stirring. pg3200.txt, pg100.txt +stirring; pg3200.txt +stirring? pg100.txt +stirrup, pg100.txt +stirrup. pg3200.txt +stirrups pg3200.txt +stirrups, pg31100.txt, pg3200.txt +stirs pg3200.txt, pg100.txt +stirs, pg3200.txt +stirs. pg100.txt +stirs? pg100.txt +stitch pg3200.txt +stoat, pg3200.txt +stoccadoes, pg100.txt +stock pg31100.txt, pg3200.txt, pg100.txt +stock, pg31100.txt, pg3200.txt, pg100.txt +stock-raisers pg3200.txt +stock. pg3200.txt, pg100.txt +stock." pg3200.txt +stock; pg100.txt +stock?" pg3200.txt +stocked pg31100.txt, pg3200.txt +stocked, pg3200.txt +stocked. 
pg3200.txt +stockholders--another pg3200.txt +stocking-knitters. pg3200.txt +stockings pg31100.txt, pg3200.txt, pg100.txt +stockings, pg3200.txt, pg100.txt +stockings,'- pg100.txt +stockings. pg3200.txt, pg100.txt +stockings?" pg31100.txt +stockings?' pg100.txt +stocks pg3200.txt +stocks! pg100.txt +stocks, pg3200.txt +stocks. pg100.txt +stocks.] pg100.txt +stocks? pg100.txt +stocks?" pg3200.txt +stockton, pg3200.txt +stockton. pg3200.txt +stoddard pg3200.txt +stoddard, pg3200.txt +stoddard: pg3200.txt +stogy. pg3200.txt +stoical pg3200.txt +stokes pg31100.txt +stokes,' pg31100.txt +stol'n pg100.txt +stol'n, pg100.txt +stol'n. pg100.txt +stole pg3200.txt, pg100.txt +stole!" pg3200.txt +stole, pg100.txt +stolen pg31100.txt, pg3200.txt, pg100.txt +stolen!" pg3200.txt +stolen, pg3200.txt +stolen. pg3200.txt +stolen." pg3200.txt +stolen?" pg3200.txt +stolid pg3200.txt +stomach pg31100.txt, pg3200.txt, pg100.txt +stomach! pg3200.txt +stomach!" pg3200.txt +stomach, pg3200.txt, pg100.txt +stomach-ache pg3200.txt +stomach-ache. pg3200.txt +stomach. pg3200.txt, pg100.txt +stomach.' pg3200.txt +stomachers, pg100.txt +stomaching. pg100.txt +stomachs pg3200.txt, pg100.txt +stomachs!" pg3200.txt +stomachs, pg3200.txt +stomachs. pg100.txt +ston'd; pg100.txt +stone pg31100.txt, pg3200.txt, pg100.txt +stone! pg3200.txt, pg100.txt +stone" pg3200.txt +stone's pg3200.txt +stone's. pg3200.txt +stone), pg3200.txt +stone, pg3200.txt, pg100.txt +stone- pg100.txt +stone----an pg3200.txt +stone--that pg3200.txt +stone-benched pg3200.txt +stone-blind pg3200.txt +stone-boats; pg3200.txt +stone-breaker, pg3200.txt +stone-paved pg3200.txt +stone-still. pg100.txt +stone. pg3200.txt, pg100.txt +stone." pg3200.txt +stone.' pg3200.txt +stone; pg3200.txt, pg100.txt +stone?" pg3200.txt +stoned pg3200.txt +stones pg3200.txt, pg100.txt +stones!'] pg100.txt +stones) pg100.txt +stones, pg3200.txt, pg100.txt +stones-" pg100.txt +stones. pg3200.txt, pg100.txt +stones; pg3200.txt, pg100.txt +stones? pg100.txt +stonewall." pg3200.txt +stonework, pg3200.txt +stoning pg3200.txt +stony pg3200.txt +stood pg31100.txt, pg3200.txt, pg100.txt +stood! pg3200.txt +stood, pg3200.txt, pg100.txt +stood--the pg3200.txt +stood. pg31100.txt, pg3200.txt, pg100.txt +stood; pg3200.txt +stool pg100.txt +stool, pg100.txt +stool. pg100.txt +stools, pg3200.txt +stoop pg3200.txt, pg100.txt +stoop, pg100.txt +stoop- pg3200.txt +stoop-shouldered pg3200.txt +stoop. pg100.txt +stoop." pg3200.txt +stoop; pg100.txt +stoop? pg100.txt +stooped pg3200.txt +stoopeth pg3200.txt +stooping pg3200.txt +stooping- pg100.txt +stooping. pg100.txt +stoops pg31100.txt +stop pg31100.txt, pg3200.txt, pg100.txt +stop! pg100.txt +stop!" pg31100.txt +stop, pg31100.txt, pg3200.txt, pg100.txt +stop- pg3200.txt +stop. pg31100.txt, pg3200.txt, pg100.txt +stop." pg3200.txt +stop._] pg31100.txt +stop; pg3200.txt, pg100.txt +stop? pg3200.txt +stopp'd pg100.txt +stopp'd, pg100.txt +stopp'd. pg100.txt +stopp'd; pg100.txt +stopped pg31100.txt, pg3200.txt +stopped, pg31100.txt, pg3200.txt +stopped--that pg3200.txt +stopped--the pg3200.txt +stopped. pg31100.txt, pg3200.txt +stopped.' pg3200.txt +stopping pg31100.txt, pg3200.txt +stopping, pg3200.txt +stopping. pg3200.txt +stopple pg3200.txt +stops pg3200.txt +stops, pg31100.txt +stops. pg100.txt +stops; pg3200.txt, pg100.txt +stopt pg31100.txt +stor'd. pg100.txt +store pg31100.txt, pg3200.txt, pg100.txt +store, pg3200.txt, pg100.txt +store- pg100.txt +store. pg3200.txt, pg100.txt +store." pg3200.txt +store.) 
pg3200.txt +store: pg100.txt +store; pg3200.txt, pg100.txt +stored pg3200.txt +storen. pg3200.txt +stores pg31100.txt, pg3200.txt +stores, pg3200.txt +storhbach's pg3200.txt +storied pg3200.txt +stories pg31100.txt, pg3200.txt +stories, pg3200.txt +stories. pg3200.txt +stories: pg31100.txt +stories; pg3200.txt +storing pg3200.txt +stork's pg3200.txt +storm pg31100.txt, pg3200.txt, pg100.txt +storm! pg3200.txt, pg100.txt +storm!" pg3200.txt +storm). pg3200.txt +storm, pg31100.txt, pg3200.txt, pg100.txt +storm- pg3200.txt +storm--(repeat pg3200.txt +storm-blast pg3200.txt +storm-center pg3200.txt +storm-clouds; pg3200.txt +storm-swept pg3200.txt +storm. pg3200.txt, pg100.txt +storm." pg3200.txt +storm: pg3200.txt +storm; pg3200.txt, pg100.txt +storm;" pg3200.txt +storm? pg3200.txt +storm?" pg3200.txt +stormfield pg3200.txt +stormin'." pg3200.txt +storming pg3200.txt +storming, pg3200.txt +storms pg31100.txt, pg3200.txt +storms, pg3200.txt, pg100.txt +storms. pg3200.txt, pg100.txt +storms." pg3200.txt +storms; pg100.txt +stormy pg3200.txt +stormy. pg3200.txt +stornaway, pg31100.txt +storrs, pg3200.txt +story pg31100.txt, pg3200.txt, pg100.txt +story" pg3200.txt +story's pg3200.txt +story, pg31100.txt, pg3200.txt, pg100.txt +story- pg100.txt +story-- pg3200.txt +story--it pg3200.txt +story--with pg3200.txt +story-books pg3200.txt +story-teller pg3200.txt +story-tellers pg3200.txt +story. pg31100.txt, pg3200.txt, pg100.txt +story." pg31100.txt, pg3200.txt +story.' pg3200.txt +story: pg3200.txt, pg100.txt +story; pg31100.txt, pg3200.txt, pg100.txt +story? pg31100.txt, pg3200.txt, pg100.txt +stoup pg100.txt +stout pg31100.txt, pg3200.txt +stout, pg100.txt +stout-hearted pg3200.txt +stout." pg31100.txt +stoutish, pg3200.txt +stoutly pg3200.txt +stoutness pg100.txt +stove pg3200.txt +stove, pg31100.txt, pg3200.txt +stove-door pg3200.txt +stove-pipe pg3200.txt +stove-pipe, pg3200.txt +stove-polish pg3200.txt +stove. pg3200.txt +stove.' pg3200.txt +stove; pg3200.txt +stovepipe pg3200.txt +stovepipe, pg3200.txt +stow'd. pg100.txt +stowe pg3200.txt +stowed pg3200.txt +stowed, pg100.txt +straddle pg3200.txt +straddled pg3200.txt +straggle pg3200.txt +straggler pg3200.txt +stragglers pg3200.txt +stragglers. pg3200.txt +straggling pg3200.txt +straight pg3200.txt, pg100.txt +straight, pg3200.txt, pg100.txt +straight-laced pg3200.txt +straight-off--thus: pg3200.txt +straight-up-and-down pg3200.txt +straight. pg3200.txt, pg100.txt +straight." pg3200.txt +straight: pg100.txt +straight; pg3200.txt, pg100.txt +straight?" pg3200.txt +straighten pg3200.txt +straightened pg3200.txt +straightened; pg3200.txt +straightening pg3200.txt +straightforward pg3200.txt +straightforward: pg3200.txt +straightforwardness; pg3200.txt +straightway pg3200.txt +straightway, pg3200.txt +straightway-- pg3200.txt +straightway." pg3200.txt +straightway; pg3200.txt +straightway? pg100.txt +strain pg3200.txt, pg100.txt +strain! pg100.txt +strain'd; pg100.txt +strain, pg3200.txt, pg100.txt +strain. pg3200.txt +strain." pg3200.txt +strain: pg3200.txt +strained pg3200.txt +strained; pg3200.txt +straining pg3200.txt +strains pg3200.txt, pg100.txt +strains, pg100.txt +strait pg3200.txt, pg100.txt +strait-jacket!' pg3200.txt +strait. pg100.txt +straited pg100.txt +straitened pg31100.txt +straits pg3200.txt +strand pg3200.txt +strand, pg3200.txt, pg100.txt +strand. pg100.txt +strange pg31100.txt, pg3200.txt, pg100.txt +strange! pg31100.txt, pg100.txt +strange!" 
pg31100.txt, pg3200.txt +strange!--after pg31100.txt +strange, pg3200.txt, pg100.txt +strange--" pg3200.txt +strange--believed pg3200.txt +strange--stranger pg3200.txt +strange. pg3200.txt, pg100.txt +strange." pg3200.txt +strange: pg3200.txt, pg100.txt +strange; pg100.txt +strange? pg100.txt +strange?" pg3200.txt +strangely pg31100.txt, pg3200.txt, pg100.txt +strangely. pg100.txt +strangely? pg100.txt +strangeness pg100.txt +strangeness. pg100.txt +stranger pg31100.txt, pg3200.txt, pg100.txt +stranger! pg31100.txt, pg3200.txt +stranger!" pg3200.txt +stranger's pg3200.txt +stranger, pg31100.txt, pg3200.txt, pg100.txt +stranger--" pg3200.txt +stranger--and pg3200.txt +stranger--consequently pg3200.txt +stranger--for pg3200.txt +stranger--to pg3200.txt +stranger-march pg100.txt +stranger. pg3200.txt, pg100.txt +stranger." pg31100.txt, pg3200.txt +stranger; pg3200.txt +stranger? pg100.txt +stranger?" pg3200.txt +strangers pg3200.txt, pg100.txt +strangers! pg100.txt +strangers, pg31100.txt, pg3200.txt +strangers- pg100.txt +strangers. pg31100.txt, pg3200.txt, pg100.txt +strangers." pg31100.txt, pg3200.txt +strangers; pg31100.txt +strangers?" pg31100.txt +strangest pg3200.txt, pg100.txt +strangle pg3200.txt +strangled pg3200.txt, pg100.txt +strangled, pg3200.txt +strangler pg3200.txt, pg100.txt +strangles pg3200.txt +strangling pg3200.txt +strangulated pg3200.txt +strappado pg100.txt +strapped pg3200.txt +strapping, pg3200.txt +straps pg3200.txt +straps. pg100.txt +stratagem pg3200.txt, pg100.txt +stratagem! pg100.txt +stratagem, pg100.txt +stratagem. pg100.txt +stratagems pg100.txt +stratagems. pg100.txt +strategist's pg3200.txt +strategy pg3200.txt +strategy, pg3200.txt +strategy. pg3200.txt +stratford pg3200.txt +stratford, pg100.txt +stratford. pg3200.txt +stratfordians pg3200.txt +stratified pg3200.txt +strato? pg100.txt +straw pg31100.txt, pg3200.txt, pg100.txt +straw, pg3200.txt, pg100.txt +straw--now pg3200.txt +straw. pg100.txt +straw? pg100.txt +strawberries pg3200.txt +strawberries. pg100.txt +strawberries." pg3200.txt +strawberry pg3200.txt +strawbries." pg3200.txt +straws!' pg3200.txt +straws, pg100.txt +straws. pg3200.txt +stray pg100.txt +stray'd pg100.txt +stray, pg100.txt +stray. pg100.txt +stray: pg100.txt +strays, pg100.txt +streak pg3200.txt +streaked pg3200.txt +streaked, pg3200.txt +streaks pg3200.txt, pg100.txt +stream pg31100.txt, pg3200.txt, pg100.txt +stream's pg3200.txt +stream, pg31100.txt, pg3200.txt, pg100.txt +stream. pg3200.txt, pg100.txt +stream.' pg3200.txt +stream: pg100.txt +stream; pg31100.txt, pg3200.txt +stream? pg100.txt +streamed pg3200.txt +streamers; pg3200.txt +streaming pg3200.txt +streams pg3200.txt +streams, pg3200.txt, pg100.txt +streams. pg100.txt +street pg31100.txt, pg3200.txt, pg100.txt +street! pg3200.txt +street'; pg3200.txt +street, pg31100.txt, pg3200.txt, pg100.txt +street,' pg3200.txt +street-cars. pg3200.txt +street-lamp. pg3200.txt +street-lamps. pg3200.txt +street. pg31100.txt, pg3200.txt, pg100.txt +street." pg31100.txt, pg3200.txt +street.' pg3200.txt +street.) pg3200.txt +street; pg31100.txt, pg3200.txt, pg100.txt +street? pg31100.txt, pg3200.txt, pg100.txt +street?" pg31100.txt, pg3200.txt +streets pg31100.txt, pg3200.txt, pg100.txt +streets, pg3200.txt, pg100.txt +streets--a pg3200.txt +streets--specimens pg3200.txt +streets--why, pg3200.txt +streets. pg3200.txt, pg100.txt +streets: pg100.txt +streets; pg31100.txt, pg3200.txt, pg100.txt +streets? 
pg100.txt +strength pg31100.txt, pg3200.txt, pg100.txt +strength, pg31100.txt, pg3200.txt, pg100.txt +strength," pg31100.txt +strength--and pg3200.txt +strength--it pg31100.txt +strength-giving pg3200.txt +strength. pg31100.txt, pg3200.txt, pg100.txt +strength." pg3200.txt +strength; pg31100.txt, pg3200.txt, pg100.txt +strengthen pg31100.txt, pg100.txt +strengthened pg31100.txt +strengthened, pg31100.txt +strengthened. pg100.txt +strengthened; pg3200.txt +strengthening pg31100.txt, pg3200.txt +strengths pg100.txt +strenuous pg3200.txt +strenuous--strenuous pg3200.txt +strenuously pg3200.txt +strenuously. pg3200.txt +stress pg3200.txt +stress, pg3200.txt +stretch pg31100.txt, pg3200.txt, pg100.txt +stretch! pg3200.txt +stretch'd; pg100.txt +stretch. pg3200.txt +stretch; pg100.txt +stretched pg31100.txt, pg3200.txt +stretched, pg3200.txt +stretchers, pg3200.txt +stretches pg3200.txt, pg100.txt +stretching pg31100.txt, pg3200.txt +stretching, pg3200.txt +stretchy, pg3200.txt +strew pg100.txt +strew'd, pg100.txt +strew. pg100.txt +strewed pg3200.txt +strewn pg3200.txt +stricken pg3200.txt +stricken. pg3200.txt +strickly pg3200.txt +strict pg31100.txt, pg3200.txt, pg100.txt +strict, pg3200.txt +strict- pg3200.txt +strict." pg31100.txt +strictest pg31100.txt, pg3200.txt +strictly pg31100.txt, pg3200.txt +strictly. pg3200.txt +strictness, pg3200.txt +stride pg100.txt +stride. pg3200.txt +strides pg3200.txt +strides, pg3200.txt +strides. pg3200.txt +striding pg3200.txt +strife pg100.txt +strife! pg100.txt +strife, pg3200.txt, pg100.txt +strife. pg100.txt +strife; pg100.txt +strife? pg100.txt +strik'st pg100.txt +strike pg31100.txt, pg3200.txt, pg100.txt +strike! pg100.txt +strike, pg3200.txt, pg100.txt +strike. pg100.txt +strike? pg100.txt +strikes pg31100.txt, pg3200.txt, pg100.txt +strikes, pg3200.txt +strikes. pg100.txt +strikes] pg100.txt +strikest pg100.txt +striking pg31100.txt, pg3200.txt, pg100.txt +striking, pg3200.txt +striking. pg31100.txt, pg3200.txt +striking; pg31100.txt +strikingly pg31100.txt, pg3200.txt +string pg3200.txt +string, pg31100.txt, pg3200.txt, pg100.txt +string--do pg3200.txt +string. pg3200.txt +string." pg3200.txt +string; pg100.txt +stringent. pg3200.txt +stringing pg3200.txt +strings pg31100.txt, pg3200.txt, pg100.txt +strings, pg100.txt +strings. pg3200.txt +strings? pg100.txt +strip pg3200.txt +strip!" pg3200.txt +strip, pg3200.txt +strip." pg3200.txt +stripe pg3200.txt +stripe:] pg3200.txt +striped pg31100.txt, pg3200.txt +stripes pg3200.txt +stripes, pg3200.txt +stripes. pg3200.txt +stripling pg3200.txt +stripp'd. pg100.txt +stripped pg3200.txt +stripping pg3200.txt, pg100.txt +strips pg3200.txt +strips; pg3200.txt +strive pg3200.txt, pg100.txt +strive, pg100.txt +strive- pg100.txt +strives pg100.txt +strives, pg100.txt +striving, pg3200.txt, pg100.txt +strode pg3200.txt +strode, pg3200.txt +stroke pg31100.txt, pg3200.txt, pg100.txt +stroke!" pg3200.txt +stroke, pg3200.txt, pg100.txt +stroke-oar pg3200.txt +stroke. pg31100.txt, pg3200.txt, pg100.txt +stroke; pg100.txt +stroke? pg3200.txt +stroked pg3200.txt +stroked. pg3200.txt +strokes, pg3200.txt, pg100.txt +strokes--a pg3200.txt +strokes. pg3200.txt, pg100.txt +stroking pg3200.txt +stroll pg31100.txt, pg3200.txt +strolling pg3200.txt +strolls pg3200.txt +stromboli--sicily pg3200.txt +strond, pg100.txt +strong pg31100.txt, pg3200.txt, pg100.txt +strong, pg31100.txt, pg3200.txt, pg100.txt +strong- pg100.txt +strong--and pg31100.txt +strong. pg31100.txt, pg3200.txt, pg100.txt +strong." 
pg31100.txt, pg3200.txt +strong.' pg3200.txt +strong.'" pg31100.txt +strong.- pg100.txt +strong; pg31100.txt, pg3200.txt, pg100.txt +strong? pg100.txt +strong?" pg31100.txt +stronger pg31100.txt, pg3200.txt, pg100.txt +stronger, pg31100.txt, pg3200.txt +stronger. pg31100.txt +stronger?" pg31100.txt +strongest pg31100.txt, pg3200.txt +strongest." pg31100.txt +stronghold pg3200.txt +strongholds pg3200.txt +strongholds. pg3200.txt +strongholds?" pg3200.txt +strongly pg31100.txt, pg3200.txt, pg100.txt +strongly-marked pg31100.txt +strongly. pg100.txt +strossers. pg100.txt +strove pg100.txt +strove. pg3200.txt +strown; pg100.txt +struck pg31100.txt, pg3200.txt, pg100.txt +struck!" pg3200.txt +struck, pg3200.txt, pg100.txt +struck. pg31100.txt, pg3200.txt, pg100.txt +struck; pg3200.txt +struck?" pg3200.txt +strucken pg100.txt +structurally pg3200.txt +structure pg3200.txt +structure, pg3200.txt +structure. pg3200.txt +structures; pg3200.txt +struggle pg31100.txt, pg3200.txt +struggle, pg3200.txt +struggle--eng pg3200.txt +struggle. pg3200.txt +struggle." pg31100.txt +struggle.) pg3200.txt +struggle?" pg3200.txt +struggled pg31100.txt, pg3200.txt +struggled, pg3200.txt +struggled: pg3200.txt +struggles pg3200.txt +struggles, pg3200.txt +struggles] pg100.txt +struggling pg31100.txt, pg3200.txt +struggling, pg3200.txt +struggling; pg100.txt +strumpet pg100.txt +strumpet! pg100.txt +strumpet, pg100.txt +strumpet. pg100.txt +strumpet? pg100.txt +strumpeted, pg100.txt +strung pg3200.txt +strung, pg3200.txt +strut pg100.txt +strychnine pg3200.txt +stuart. pg31100.txt +stubborn pg31100.txt, pg3200.txt +stubborn. pg31100.txt +stubbornly pg3200.txt +stuck pg31100.txt, pg3200.txt, pg100.txt +stuck, pg100.txt +stuck-up--i pg3200.txt +stud, pg3200.txt +student pg3200.txt, pg100.txt +student, pg3200.txt +student. pg3200.txt, pg100.txt +student." pg3200.txt +student; pg3200.txt +students pg3200.txt, pg100.txt +students, pg3200.txt +students. pg3200.txt +students; pg3200.txt +studied pg31100.txt, pg3200.txt, pg100.txt +studied, pg100.txt +studied." pg31100.txt, pg3200.txt +studien pg3200.txt +studies pg3200.txt, pg100.txt +studies, pg3200.txt +studies. pg3200.txt, pg100.txt +studies; pg3200.txt +studio.] pg3200.txt +studious, pg3200.txt +studious. pg31100.txt +studiously pg3200.txt +studs; pg3200.txt +study pg31100.txt, pg3200.txt, pg100.txt +study, pg3200.txt, pg100.txt +study- pg100.txt +study. pg31100.txt, pg3200.txt, pg100.txt +study." pg31100.txt +study; pg3200.txt, pg100.txt +study? pg100.txt +study?" pg31100.txt +studying pg31100.txt, pg3200.txt, pg100.txt +studying. pg3200.txt +studying." pg3200.txt +stuff pg31100.txt, pg3200.txt, pg100.txt +stuff! pg100.txt +stuff!" pg3200.txt +stuff'd pg100.txt +stuff, pg3200.txt +stuff--but pg3200.txt +stuff. pg3200.txt, pg100.txt +stuff." pg3200.txt +stuff.' pg3200.txt +stuff: pg100.txt +stuff; pg100.txt +stuff? pg100.txt +stuffed pg3200.txt +stuffed, pg3200.txt +stuffed." pg3200.txt +stuffed.' pg3200.txt +stuffs. pg3200.txt +stuffy, pg3200.txt +stumble pg3200.txt +stumble, pg100.txt +stumble. pg100.txt +stumbled pg3200.txt +stumbles pg3200.txt +stumbling-blocks pg100.txt +stumbling. pg3200.txt +stump pg3200.txt +stump? pg3200.txt +stump?" pg3200.txt +stumps pg3200.txt +stumps; pg3200.txt +stung pg3200.txt, pg100.txt +stung, pg3200.txt +stung. pg3200.txt, pg100.txt +stunned pg3200.txt +stunned. pg31100.txt, pg3200.txt +stunning pg3200.txt +stupefaction; pg31100.txt +stupefied pg3200.txt +stupefied, pg3200.txt +stupefied." pg31100.txt +stupefied.] 
pg3200.txt +stupefied; pg3200.txt +stupefying pg3200.txt +stupefying. pg3200.txt +stupendous pg3200.txt +stupid pg31100.txt, pg3200.txt, pg100.txt +stupid, pg31100.txt +stupid. pg31100.txt, pg3200.txt +stupid." pg31100.txt +stupid; pg3200.txt +stupid?" pg31100.txt +stupider pg3200.txt +stupidest pg3200.txt +stupidities pg3200.txt +stupidity pg3200.txt +stupidly pg3200.txt +stupified pg31100.txt, pg100.txt +sturdiest pg3200.txt +sturdily pg3200.txt +sturdy pg3200.txt +sturgis pg3200.txt +sturgis, pg3200.txt +sturt; pg3200.txt +sty pg100.txt +sty! pg100.txt +styl'd pg100.txt +style pg31100.txt, pg3200.txt, pg100.txt +style, pg31100.txt, pg3200.txt, pg100.txt +style--better pg3200.txt +style--he pg3200.txt +style--no pg3200.txt +style. pg31100.txt, pg3200.txt, pg100.txt +style." pg31100.txt +style.--and pg31100.txt +style: pg3200.txt +style; pg31100.txt, pg3200.txt, pg100.txt +style? pg100.txt +styled pg3200.txt +styles pg3200.txt +stylish pg3200.txt +stylish, pg3200.txt +styx pg100.txt +styx? pg100.txt +suasion: pg3200.txt +sub-contract pg3200.txt +sub-sequent pg3200.txt +subagent, pg3200.txt +subdivisions pg3200.txt +subdu'd pg100.txt +subdu'd, pg100.txt +subdu'd. pg100.txt +subdu'd; pg100.txt +subdue pg31100.txt, pg3200.txt, pg100.txt +subdue, pg100.txt +subdue. pg31100.txt +subdued pg3200.txt, pg100.txt +subdued, pg3200.txt +subdued. pg31100.txt, pg3200.txt +subduements, pg100.txt +subduing pg31100.txt, pg3200.txt +subject pg31100.txt, pg3200.txt, pg100.txt +subject! pg3200.txt +subject!'" pg3200.txt +subject's pg100.txt +subject, pg31100.txt, pg3200.txt, pg100.txt +subject-- pg3200.txt +subject--" pg3200.txt +subject-matter. pg3200.txt +subject. pg31100.txt, pg3200.txt, pg100.txt +subject." pg31100.txt, pg3200.txt +subject.) pg3200.txt +subject.-- pg31100.txt +subject: pg3200.txt +subject:--a pg31100.txt +subject; pg31100.txt, pg3200.txt +subject? pg3200.txt, pg100.txt +subject?" pg31100.txt, pg3200.txt +subjected pg3200.txt +subjection pg31100.txt, pg3200.txt +subjection. pg100.txt +subjectry pg3200.txt +subjects pg31100.txt, pg3200.txt, pg100.txt +subjects, pg31100.txt, pg3200.txt, pg100.txt +subjects- pg100.txt +subjects. pg31100.txt, pg3200.txt, pg100.txt +subjects." pg31100.txt +subjects; pg3200.txt, pg100.txt +subjects? pg100.txt +subjoin pg31100.txt +subjugated. pg3200.txt +sublime pg3200.txt +sublime. pg3200.txt +sublime." pg3200.txt +sublime; pg3200.txt +sublimer, pg3200.txt +sublimities pg3200.txt +sublimities. pg3200.txt +sublimity pg3200.txt +sublimity, pg3200.txt +sublimity. pg3200.txt +submerg'd pg100.txt +submerged, pg3200.txt +submerged. pg3200.txt +submission pg31100.txt, pg3200.txt +submission! pg100.txt +submission, pg31100.txt +submission. pg3200.txt, pg100.txt +submissiveness pg3200.txt +submit pg31100.txt, pg3200.txt, pg100.txt +submit, pg100.txt +submit. pg31100.txt, pg3200.txt +submit." pg3200.txt +submit; pg31100.txt +submit? pg100.txt +submitted pg31100.txt, pg3200.txt +submitted, pg31100.txt, pg3200.txt +submitted--creil, pg3200.txt +submitting pg3200.txt +subordinate pg31100.txt, pg3200.txt +subordinate. pg3200.txt +subordinated, pg3200.txt +subordinates pg3200.txt +subordination pg31100.txt +suborn pg100.txt +suborn'd pg100.txt +suborn'd: pg100.txt +subornation, pg100.txt +subornation- pg100.txt +subscrib'd pg100.txt +subscrib'd. pg100.txt +subscribe pg31100.txt, pg3200.txt +subscribe. pg3200.txt +subscribe; pg3200.txt, pg100.txt +subscribed pg3200.txt +subscribed. pg3200.txt +subscriber pg3200.txt +subscriber." 
pg3200.txt +subscribers pg3200.txt +subscribers, pg3200.txt +subscribers. pg3200.txt +subscribes pg100.txt +subscribes, pg100.txt +subscription pg3200.txt +subscription. pg3200.txt +subscriptions) pg3200.txt +subscriptions. pg3200.txt +subsequent pg31100.txt, pg3200.txt +subsequent. pg3200.txt +subsequently, pg31100.txt, pg3200.txt +subside pg31100.txt +subside, pg31100.txt +subsided pg31100.txt, pg3200.txt +subsided, pg3200.txt +subsided; pg31100.txt +subsidies, pg100.txt +subsiding pg3200.txt +subsidize. pg3200.txt +subsidy pg3200.txt +subsidy. pg100.txt +subsist pg3200.txt +subsist, pg100.txt +subsisted pg31100.txt, pg3200.txt +subsistence pg3200.txt +subsisting pg31100.txt, pg100.txt +subsoiling. pg3200.txt +substance pg31100.txt, pg3200.txt, pg100.txt +substance! pg100.txt +substance, pg3200.txt, pg100.txt +substance. pg3200.txt +substance: pg3200.txt +substance; pg3200.txt +substances pg100.txt +substances. pg100.txt +substantial pg3200.txt +substantial, pg3200.txt +substantial. pg100.txt +substantially pg3200.txt +substantially. pg3200.txt +substitute pg3200.txt +substitute, pg100.txt +substitute; pg100.txt +substituted pg3200.txt +substituted. pg3200.txt +substitutes pg100.txt +substitutes. pg100.txt +substitution pg31100.txt, pg3200.txt +substitution, pg100.txt +subterranean pg3200.txt +subtle pg3200.txt, pg100.txt +subtle, pg100.txt +subtler pg3200.txt +subtleties pg3200.txt +subtleties. pg100.txt +subtlety pg3200.txt +subtlety, pg100.txt +subtly pg3200.txt +subtracted pg3200.txt +subtracting pg3200.txt +subtractors pg100.txt +suburbs pg3200.txt, pg100.txt +suburbs? pg100.txt +subway; pg3200.txt +succedant'- pg100.txt +succeed pg31100.txt, pg3200.txt +succeed, pg31100.txt, pg3200.txt, pg100.txt +succeed. pg31100.txt, pg3200.txt, pg100.txt +succeed." pg31100.txt, pg3200.txt +succeed: pg3200.txt +succeed; pg3200.txt, pg100.txt +succeed? pg100.txt +succeed?" pg3200.txt +succeeded pg31100.txt, pg3200.txt +succeeded! pg3200.txt +succeeded, pg31100.txt, pg3200.txt +succeeded--saying-- pg3200.txt +succeeded--the pg3200.txt +succeeded. pg3200.txt +succeeded; pg31100.txt +succeeding pg31100.txt, pg3200.txt +succeeding. pg3200.txt +succeeding." pg31100.txt +succeeding?" pg31100.txt +succeeds pg3200.txt, pg100.txt +succeeds, pg100.txt +succeeds. pg3200.txt +success pg31100.txt, pg3200.txt, pg100.txt +success! pg100.txt +success, pg31100.txt, pg3200.txt, pg100.txt +success. pg31100.txt, pg3200.txt, pg100.txt +success." pg31100.txt, pg3200.txt +success: pg100.txt +success; pg31100.txt, pg3200.txt +success? pg3200.txt, pg100.txt +success?" pg31100.txt +successful pg3200.txt +successful, pg3200.txt +successful. pg31100.txt, pg3200.txt +successfully pg3200.txt +successfully, pg3200.txt, pg100.txt +successfully. pg3200.txt, pg100.txt +succession pg3200.txt, pg100.txt +succession, pg31100.txt, pg3200.txt, pg100.txt +succession--what pg3200.txt +succession. pg3200.txt, pg100.txt +succession; pg100.txt +succession? pg100.txt +successively pg31100.txt, pg3200.txt +successively, pg31100.txt +successively. pg100.txt +successor pg3200.txt, pg100.txt +successor, pg3200.txt +successor. pg3200.txt +successors pg3200.txt +succor pg3200.txt +succor. pg3200.txt +succored pg3200.txt +succoring pg3200.txt +succors, pg3200.txt +succour. pg100.txt +succumb. pg3200.txt +succumbed pg3200.txt +succumbs. pg3200.txt +such, pg31100.txt, pg3200.txt, pg100.txt +such--" pg3200.txt +such-a-one, pg100.txt +such-and-such pg3200.txt +such. pg31100.txt, pg3200.txt, pg100.txt +such." 
pg3200.txt +such; pg3200.txt +such;--but pg31100.txt +suchlike--and pg3200.txt +suck pg3200.txt, pg100.txt +suck, pg100.txt +suck. pg100.txt +suck; pg100.txt +sucked pg3200.txt +sucking pg3200.txt +sucklings pg31100.txt +sucklings. pg31100.txt +sucks pg3200.txt, pg100.txt +sudden pg31100.txt, pg3200.txt, pg100.txt +sudden!" pg3200.txt +sudden, pg3200.txt, pg100.txt +sudden--but pg3200.txt +sudden. pg3200.txt, pg100.txt +sudden; pg3200.txt, pg100.txt +sudden? pg100.txt +suddenly pg31100.txt, pg3200.txt, pg100.txt +suddenly! pg31100.txt +suddenly, pg31100.txt, pg3200.txt, pg100.txt +suddenly--" pg3200.txt +suddenly. pg3200.txt, pg100.txt +suddenly." pg31100.txt +suddenly: pg3200.txt +suddenly; pg100.txt +suddenly? pg100.txt +suddenness pg31100.txt, pg3200.txt +suddenness, pg3200.txt +suddenness. pg3200.txt +sudgest. pg3200.txt +sudra pg3200.txt +sue pg31100.txt, pg3200.txt, pg100.txt +sue's pg3200.txt +sue, pg100.txt +sue; pg100.txt +sue? pg100.txt +sues pg100.txt +sues; pg100.txt +suez-alexandria,...............224 pg3200.txt +suff'rance pg100.txt +suffciently pg31100.txt +suffer pg31100.txt, pg3200.txt, pg100.txt +suffer! pg31100.txt +suffer'd pg100.txt +suffer'st pg100.txt +suffer, pg31100.txt, pg3200.txt, pg100.txt +suffer- pg100.txt +suffer--all pg3200.txt +suffer--by pg3200.txt +suffer--perfectly pg3200.txt +suffer. pg31100.txt, pg3200.txt, pg100.txt +suffer." pg31100.txt, pg3200.txt +suffer; pg31100.txt, pg100.txt +sufferance pg3200.txt, pg100.txt +sufferance. pg100.txt +sufferance." pg3200.txt +sufferance; pg100.txt +suffered pg31100.txt, pg3200.txt, pg100.txt +suffered, pg31100.txt, pg3200.txt +suffered--" pg31100.txt +suffered. pg31100.txt, pg3200.txt, pg100.txt +sufferer pg3200.txt +sufferer. pg31100.txt, pg3200.txt +sufferers pg3200.txt +sufferers, pg3200.txt +sufferers. pg3200.txt +suffering pg31100.txt, pg3200.txt +suffering!--not pg3200.txt +suffering, pg31100.txt, pg3200.txt +suffering-- pg3200.txt +suffering--" pg3200.txt +suffering--i pg3200.txt +suffering-machine pg3200.txt +suffering. pg31100.txt, pg3200.txt +suffering." pg3200.txt +suffering? pg3200.txt +sufferings pg31100.txt, pg3200.txt +sufferings, pg31100.txt, pg3200.txt +sufferings. pg3200.txt +suffers pg31100.txt, pg100.txt +suffers. pg100.txt +suffers." pg31100.txt +suffic'd, pg100.txt +suffice pg31100.txt, pg3200.txt +suffice, pg100.txt +suffice. pg3200.txt +suffice." pg31100.txt, pg3200.txt +suffice; pg100.txt +sufficed, pg100.txt +sufficed. pg3200.txt +suffices pg100.txt +suffices." pg3200.txt +sufficeth pg100.txt +sufficiency pg3200.txt, pg100.txt +sufficiency, pg31100.txt, pg100.txt +sufficient pg31100.txt, pg3200.txt, pg100.txt +sufficient, pg3200.txt +sufficient--take pg3200.txt +sufficient. pg31100.txt, pg3200.txt, pg100.txt +sufficient." pg3200.txt +sufficient.--she pg31100.txt +sufficient; pg3200.txt +sufficient?" pg31100.txt +sufficiently pg31100.txt, pg3200.txt +sufficiently. pg31100.txt, pg3200.txt, pg100.txt +sufficit. pg100.txt +suffigance. pg100.txt +suffise pg3200.txt +suffocate!" pg3200.txt +suffocate, pg100.txt +suffocate. pg100.txt +suffocate.] pg3200.txt +suffocate; pg3200.txt +suffocated pg3200.txt +suffocating pg3200.txt +suffocating." pg3200.txt +suffocatings. pg3200.txt +suffocation, pg3200.txt +suffocation. pg3200.txt +suffocations: pg3200.txt +suffolk pg3200.txt, pg100.txt +suffolk! pg100.txt +suffolk, pg100.txt +suffolk- pg100.txt +suffolk. pg100.txt +suffolk." pg31100.txt +suffolk; pg100.txt +suffolk? pg100.txt +suffolk?' 
pg100.txt +suffrage pg3200.txt +suffrages pg100.txt +suffrages: pg100.txt +suffuse pg3200.txt +sugar pg3200.txt +sugar, pg3200.txt, pg100.txt +sugar-loaf. pg3200.txt +sugar. pg3200.txt, pg100.txt +sugar: pg3200.txt +sugar?" pg3200.txt +sugarsop, pg100.txt +suggest pg31100.txt, pg3200.txt, pg100.txt +suggest. pg3200.txt +suggest; pg31100.txt, pg3200.txt +suggest?" pg3200.txt +suggest?' pg3200.txt +suggested pg31100.txt, pg3200.txt, pg100.txt +suggested, pg31100.txt, pg100.txt +suggested-- pg31100.txt +suggested--eagerness pg3200.txt +suggested. pg3200.txt +suggested: pg3200.txt +suggester pg3200.txt +suggesting pg31100.txt, pg3200.txt +suggestion pg31100.txt, pg3200.txt, pg100.txt +suggestion, pg3200.txt, pg100.txt +suggestion- pg100.txt +suggestion-- pg3200.txt +suggestion. pg31100.txt, pg3200.txt, pg100.txt +suggestion; pg3200.txt +suggestion? pg3200.txt +suggestions pg31100.txt, pg3200.txt +suggestions, pg3200.txt +suggestions. pg3200.txt +suggestions; pg3200.txt, pg100.txt +suggestive pg3200.txt +suggestively pg3200.txt +suggestiveness pg3200.txt +suggests pg3200.txt +suh." pg3200.txt +suh.' pg3200.txt +suicide pg3200.txt +suicide, pg3200.txt +suicide-average. pg3200.txt +suicides pg3200.txt +suis pg3200.txt +suit pg31100.txt, pg3200.txt, pg100.txt +suit! pg100.txt +suit, pg31100.txt, pg3200.txt, pg100.txt +suit- pg100.txt +suit. pg31100.txt, pg3200.txt, pg100.txt +suit." pg31100.txt, pg3200.txt +suit; pg3200.txt, pg100.txt +suit? pg100.txt +suit?" pg3200.txt +suitable pg31100.txt, pg3200.txt +suitably. pg31100.txt +suite pg31100.txt, pg3200.txt +suite, pg3200.txt +suited pg31100.txt, pg3200.txt, pg100.txt +suited! pg100.txt +suited. pg100.txt +suites pg31100.txt +suiting pg100.txt +suitor pg100.txt +suitor, pg100.txt +suitor. pg31100.txt, pg100.txt +suitor? pg100.txt +suitors, pg100.txt +suitors. pg100.txt +suitors." pg3200.txt +suitors; pg100.txt +suits pg31100.txt, pg3200.txt, pg100.txt +suits, pg100.txt +suits. pg100.txt +suits." pg3200.txt +suits? pg100.txt +suits]. pg100.txt +sulky pg3200.txt +sullen pg31100.txt, pg3200.txt, pg100.txt +sullen, pg3200.txt, pg100.txt +sullen. pg100.txt +sullenly-- pg3200.txt +suller pg3200.txt +sully pg100.txt +sully-sur-loire pg3200.txt +sully-sur-loire. pg3200.txt +sulphur pg3200.txt +sulphur! pg100.txt +sulphur-and-raw-meat pg3200.txt +sulphurets--wrote pg3200.txt +sultan pg3200.txt +sultan, pg3200.txt +sultry pg3200.txt +sum pg31100.txt, pg3200.txt, pg100.txt +sum! pg3200.txt, pg100.txt +sum, pg3200.txt, pg100.txt +sum--" pg3200.txt +sum--$38,500! pg3200.txt +sum--indeed, pg3200.txt +sum. pg31100.txt, pg3200.txt, pg100.txt +sum." pg3200.txt +sum? pg100.txt +sumach pg3200.txt +sumf'n. pg3200.txt +sumfn--but pg3200.txt +summarized pg3200.txt +summary pg3200.txt +summed pg3200.txt +summer pg31100.txt, pg3200.txt, pg100.txt +summer! pg100.txt +summer's pg100.txt +summer, pg31100.txt, pg3200.txt, pg100.txt +summer-birds pg100.txt +summer-flies pg100.txt +summer-house, pg3200.txt +summer-house. pg3200.txt +summer-time." pg3200.txt +summer. pg31100.txt, pg3200.txt, pg100.txt +summer." pg31100.txt, pg3200.txt +summer: pg3200.txt +summer; pg31100.txt, pg3200.txt, pg100.txt +summer?" pg31100.txt, pg3200.txt +summers pg31100.txt +summers. pg3200.txt +summersaults pg3200.txt +summit pg3200.txt +summit, pg3200.txt +summit--handkerchiefs pg3200.txt +summit. 
pg3200.txt +summits pg3200.txt +summits; pg3200.txt +summon pg31100.txt +summon'd pg100.txt +summoned pg31100.txt, pg3200.txt +summoned, pg3200.txt +summoning pg3200.txt +summons pg31100.txt, pg3200.txt, pg100.txt +summons, pg31100.txt +summons? pg100.txt +sumpter pg100.txt +sumptuous pg3200.txt +sumptuously pg3200.txt +sums pg3200.txt, pg100.txt +sums, pg31100.txt, pg3200.txt +sums. pg3200.txt, pg100.txt +sumter pg3200.txt +sun pg31100.txt, pg3200.txt, pg100.txt +sun! pg100.txt +sun!" pg3200.txt +sun, pg3200.txt, pg100.txt +sun- pg100.txt +sun-- pg31100.txt, pg3200.txt +sun--" pg3200.txt +sun-bonnets; pg3200.txt +sun-burned pg3200.txt +sun-cracked pg3200.txt +sun-down." pg3200.txt +sun-flames pg3200.txt +sun-rays, pg3200.txt +sun-umbrellas pg3200.txt +sun-up pg3200.txt +sun-up. pg3200.txt +sun. pg3200.txt, pg100.txt +sun." pg3200.txt +sun.' pg3200.txt +sun.] pg3200.txt +sun; pg3200.txt, pg100.txt +sun? pg100.txt +sunbeam. pg3200.txt +sunburning, pg100.txt +sunda--salary pg3200.txt +sunday pg31100.txt, pg3200.txt +sunday! pg3200.txt +sunday's pg3200.txt +sunday). pg3200.txt +sunday, pg31100.txt, pg3200.txt +sunday- pg3200.txt +sunday--stopped pg3200.txt +sunday-clothes pg3200.txt +sunday-like, pg3200.txt +sunday-like; pg3200.txt +sunday-school pg3200.txt +sunday-school, pg3200.txt +sunday-school. pg3200.txt +sunday-school?" pg3200.txt +sunday-schools pg3200.txt +sunday-schools, pg3200.txt +sunday. pg3200.txt, pg100.txt +sunday." pg3200.txt +sunday.' pg3200.txt +sunday.--m.t. pg3200.txt +sunday; pg31100.txt, pg3200.txt +sundays) pg3200.txt +sundays, pg3200.txt +sundays. pg3200.txt, pg100.txt +sundays; pg3200.txt +sunder, pg100.txt +sunder; pg100.txt +sundered pg3200.txt +sundogs, pg3200.txt +sundown pg3200.txt +sundown" pg3200.txt +sundown, pg3200.txt +sundown. pg3200.txt +sundown." pg3200.txt +sundowner; pg3200.txt +sundries--everything pg3200.txt +sundries. pg3200.txt +sunflower pg3200.txt +sunflower--an pg3200.txt +sunflower. pg3200.txt +sung pg3200.txt, pg100.txt +sung, pg3200.txt, pg100.txt +sung. pg100.txt +sung; pg100.txt +sung] pg100.txt +sunk pg31100.txt, pg3200.txt +sunk, pg3200.txt +sunk--but pg31100.txt +sunk. pg31100.txt +sunken pg3200.txt +sunken, pg100.txt +sunlight, pg3200.txt +sunlight?" pg3200.txt +sunning pg3200.txt +sunny pg3200.txt +sunrise, pg3200.txt +sunrise--people pg3200.txt +sunrise. pg3200.txt +sunrise; pg3200.txt +sunrise?" pg3200.txt +sunrise] pg3200.txt +sunrises. pg3200.txt +suns pg3200.txt +suns- pg100.txt +suns. pg100.txt +suns? pg100.txt +sunset pg3200.txt +sunset, pg3200.txt, pg100.txt +sunset. pg3200.txt +sunset." pg3200.txt +sunsets pg3200.txt +sunsets. pg3200.txt +sunshine pg3200.txt +sunshine, pg3200.txt +sunshine. pg31100.txt, pg3200.txt +sunshine." pg31100.txt, pg3200.txt +sunshiny; pg3200.txt +sunup, pg3200.txt +sunwell, pg3200.txt +suolo, pg3200.txt +sup pg31100.txt, pg100.txt +sup'rintendents?" pg3200.txt +sup. pg100.txt +superabundance pg3200.txt +superadded pg31100.txt +superb pg3200.txt +superb! pg3200.txt +superb, pg3200.txt +superbly pg3200.txt +superciliousness; pg31100.txt +superficially. pg100.txt +superficially; pg100.txt +superfluous. pg100.txt +superflux pg100.txt +superhuman pg3200.txt +superimbrication pg3200.txt +superintend pg3200.txt +superintend. pg3200.txt +superintendence, pg31100.txt +superintendency pg3200.txt +superintendent pg3200.txt +superintendent, pg3200.txt +superintendent. pg3200.txt +superintendents pg3200.txt +superintending pg3200.txt +superintending. 
pg3200.txt +superintends pg31100.txt +superior pg31100.txt, pg3200.txt +superior!" pg31100.txt +superior, pg31100.txt, pg3200.txt +superior. pg31100.txt, pg3200.txt +superior; pg3200.txt +superiorities pg3200.txt +superiority pg31100.txt +superiority. pg31100.txt, pg3200.txt +superiority; pg31100.txt +superiors pg3200.txt +superiors! pg3200.txt +superiors!' pg3200.txt +superiors. pg3200.txt +superlative. pg3200.txt +supernal pg3200.txt +supernatural pg3200.txt, pg100.txt +supernatural. pg3200.txt +supernatural? pg3200.txt +supernaturally pg3200.txt +superscription pg3200.txt, pg100.txt +superseded pg3200.txt +superseded; pg31100.txt +superstition pg3200.txt, pg100.txt +superstition!" pg3200.txt +superstition, pg3200.txt +superstition-monger, pg3200.txt +superstition. pg3200.txt +superstition; pg3200.txt +superstitions pg3200.txt +superstitions. pg3200.txt +superstitious pg3200.txt +superstitious, pg3200.txt +superstitiously, pg100.txt +supersubtle pg100.txt +supervened pg3200.txt +supervened; pg3200.txt +supervise pg3200.txt +supervision. pg3200.txt +supine pg31100.txt, pg3200.txt +supp'd, pg100.txt +supped pg3200.txt +supper pg31100.txt, pg3200.txt, pg100.txt +supper! pg100.txt +supper, pg31100.txt, pg3200.txt, pg100.txt +supper-table pg3200.txt +supper-table, pg31100.txt, pg3200.txt +supper-time! pg100.txt +supper-time, pg3200.txt +supper-time; pg3200.txt +supper. pg31100.txt, pg3200.txt, pg100.txt +supper." pg31100.txt +supper; pg31100.txt, pg3200.txt +supper? pg100.txt +suppering pg3200.txt +suppers pg3200.txt +suppers, pg3200.txt +suppers--one pg3200.txt +suppers. pg3200.txt +suppertime, pg100.txt +supping? pg100.txt +supplant, pg100.txt +supplanted pg31100.txt +supplanted.--mr. pg31100.txt +supple. pg100.txt +supplement pg3200.txt +supplemented pg3200.txt +suppliant pg3200.txt, pg100.txt +suppliant, pg100.txt +suppliant. pg100.txt +suppliants!" pg3200.txt +supplicant. pg3200.txt +supplicated, pg3200.txt +supplicating pg3200.txt +supplicating, pg3200.txt +supplication pg3200.txt +supplication-- pg31100.txt +supplication. pg31100.txt +supplication: pg3200.txt +supplication? pg100.txt +supplications pg3200.txt, pg100.txt +supplications, pg3200.txt +supplications: pg3200.txt +supplications] pg100.txt +supplied pg31100.txt, pg3200.txt, pg100.txt +supplied, pg31100.txt, pg100.txt +supplied. pg3200.txt +supplied; pg100.txt +supplies pg3200.txt +supplies, pg3200.txt +supplies. pg100.txt +supply pg31100.txt, pg3200.txt, pg100.txt +supply, pg31100.txt, pg100.txt +supply--but pg3200.txt +supply. pg31100.txt, pg100.txt +supply; pg31100.txt, pg100.txt +supply? pg100.txt +supplying pg31100.txt, pg3200.txt +supplyment. pg100.txt +support pg31100.txt, pg3200.txt, pg100.txt +support! pg31100.txt +support!) pg100.txt +support, pg31100.txt, pg3200.txt, pg100.txt +support--archbishop pg31100.txt +support--or pg3200.txt +support. pg31100.txt, pg3200.txt, pg100.txt +support." pg3200.txt +support; pg3200.txt +support?--would pg31100.txt +supportable pg100.txt +supported pg31100.txt, pg3200.txt +supported, pg3200.txt +supported. pg31100.txt, pg3200.txt +supported?" pg31100.txt +supporters pg3200.txt +supporting pg3200.txt +supports pg3200.txt +suppos'd pg100.txt +suppos'd. pg100.txt +supposable pg3200.txt +suppose pg31100.txt, pg3200.txt, pg100.txt +suppose" pg31100.txt +suppose, pg31100.txt, pg3200.txt, pg100.txt +suppose," pg31100.txt +suppose-- pg31100.txt +suppose. pg31100.txt, pg3200.txt +suppose." pg31100.txt, pg3200.txt +suppose.) 
pg3200.txt +suppose._"] pg31100.txt +suppose: pg3200.txt +suppose; pg31100.txt, pg3200.txt +suppose? pg3200.txt +suppose?" pg31100.txt, pg3200.txt +supposed pg31100.txt, pg3200.txt +supposed, pg31100.txt +supposed--and pg31100.txt +supposed. pg31100.txt, pg3200.txt +supposed." pg31100.txt, pg3200.txt +supposed; pg31100.txt, pg3200.txt +supposes pg31100.txt +supposing pg31100.txt, pg3200.txt +supposing, pg3200.txt +supposing--" pg31100.txt +supposing. pg31100.txt +supposition pg3200.txt, pg100.txt +supposition, pg31100.txt +supposition. pg3200.txt +suppositions, pg31100.txt +suppositions:-- pg31100.txt +supposititious pg3200.txt +suppress pg31100.txt, pg3200.txt, pg100.txt +suppress'd. pg100.txt +suppressed pg3200.txt +suppressed, pg3200.txt +suppressed. pg3200.txt +suppressed: pg100.txt +suppresses pg3200.txt +suppressing pg3200.txt +supremacies, pg3200.txt +supremacy pg3200.txt +supremacy, pg3200.txt, pg100.txt +supremacy. pg3200.txt +supremacy; pg100.txt +supreme pg3200.txt +supreme. pg3200.txt +supremely pg3200.txt +supremest pg3200.txt +sups pg100.txt +sups. pg100.txt +sur pg100.txt +surcease; pg100.txt +sure pg31100.txt, pg3200.txt, pg100.txt +sure!" pg3200.txt +sure!' pg3200.txt +sure's pg3200.txt +sure, pg31100.txt, pg3200.txt, pg100.txt +sure," pg31100.txt +sure,"--and pg3200.txt +sure-- pg3200.txt +sure--if pg3200.txt +sure. pg31100.txt, pg3200.txt, pg100.txt +sure." pg31100.txt, pg3200.txt +sure.-- pg31100.txt +sure: pg3200.txt +sure; pg31100.txt, pg3200.txt, pg100.txt +sure? pg3200.txt, pg100.txt +sure?" pg3200.txt +surely pg31100.txt, pg3200.txt, pg100.txt +surely! pg100.txt +surely!" pg3200.txt +surely, pg3200.txt, pg100.txt +surely. pg100.txt +surely; pg3200.txt +surely?" pg31100.txt, pg3200.txt +surer." pg3200.txt +surest pg3200.txt +surest." pg3200.txt +surety pg3200.txt, pg100.txt +surety, pg100.txt +surety. pg100.txt +surf pg3200.txt +surf-bathing. pg3200.txt +surface pg3200.txt +surface).--the pg3200.txt +surface, pg3200.txt +surface-miner pg3200.txt +surface-mining. pg3200.txt +surface. pg3200.txt +surface; pg3200.txt +surfaces pg3200.txt +surfaces-- pg3200.txt +surfeit pg3200.txt +surfeit. pg3200.txt, pg100.txt +surfeited pg3200.txt +surfeited. pg100.txt +surfeiting, pg100.txt +surge pg100.txt +surge, pg100.txt +surgeon pg3200.txt +surgeon! pg100.txt +surgeon's pg3200.txt +surgeon's,--i pg3200.txt +surgeon's. pg100.txt +surgeon, pg31100.txt, pg3200.txt +surgeon--for pg3200.txt +surgeon. pg3200.txt, pg100.txt +surgeon; pg100.txt +surgeons pg3200.txt +surgeons, pg3200.txt +surgeons--the pg3200.txt +surgeons--you pg3200.txt +surgeons. pg100.txt +surgeons; pg3200.txt +surgery pg3200.txt, pg100.txt +surgery. pg3200.txt, pg100.txt +surgical pg3200.txt +surging pg3200.txt +surging, pg3200.txt +surinam pg3200.txt +surly pg3200.txt, pg100.txt +surmise pg31100.txt, pg3200.txt, pg100.txt +surmise, pg100.txt +surmise. pg31100.txt, pg100.txt +surmise; pg100.txt +surmises, pg31100.txt, pg100.txt +surmising pg31100.txt +surmount, pg100.txt +surmount. pg100.txt +surmounted pg3200.txt +surmounted. pg3200.txt +surmounting pg31100.txt +surname): pg3200.txt +surname. pg3200.txt +surnamed pg3200.txt +surpass pg3200.txt +surpass. pg31100.txt +surpassed pg31100.txt, pg3200.txt +surpassed, pg3200.txt +surpassed.' pg3200.txt +surpassing pg31100.txt, pg3200.txt, pg100.txt +surplusage pg3200.txt +surplusage. pg3200.txt +surpris'd pg100.txt +surpris'd! pg100.txt +surpris'd, pg100.txt +surpris'd. pg100.txt +surpris'd? 
pg100.txt +surprise pg31100.txt, pg3200.txt, pg100.txt +surprise, pg31100.txt, pg3200.txt, pg100.txt +surprise," pg31100.txt +surprise-- pg3200.txt +surprise--and pg3200.txt +surprise--that pg3200.txt +surprise. pg31100.txt, pg3200.txt +surprise." pg31100.txt +surprise.] pg3200.txt +surprise: pg3200.txt +surprise; pg31100.txt, pg3200.txt +surprise?" pg31100.txt, pg3200.txt +surprised pg31100.txt, pg3200.txt +surprised, pg31100.txt, pg3200.txt +surprised," pg31100.txt +surprised. pg31100.txt, pg3200.txt +surprised; pg31100.txt, pg3200.txt +surprises pg31100.txt, pg3200.txt +surprises, pg3200.txt +surprises; pg3200.txt +surprising pg31100.txt, pg3200.txt +surprising, pg3200.txt +surprising. pg3200.txt +surprising; pg3200.txt +surprize pg31100.txt +surprize. pg31100.txt +surprized pg31100.txt +surprized, pg31100.txt +surprized. pg31100.txt +surrender pg3200.txt, pg100.txt +surrender." pg3200.txt +surrendered pg3200.txt +surrendered, pg3200.txt +surrendered. pg3200.txt +surrenders." pg3200.txt +surreptitious pg3200.txt +surreptitiously pg3200.txt +surrey pg100.txt +surrey, pg100.txt +surrounded pg31100.txt, pg3200.txt +surrounding pg31100.txt, pg3200.txt +surroundings pg3200.txt +surroundings, pg3200.txt +surroundings--although pg3200.txt +surroundings. pg3200.txt +surroundings; pg3200.txt +surrounds pg3200.txt +surry, pg31100.txt +surry." pg31100.txt +survey pg31100.txt, pg3200.txt, pg100.txt +survey, pg3200.txt, pg100.txt +surveyed pg3200.txt +surveying pg31100.txt, pg3200.txt +surveyor pg31100.txt, pg3200.txt, pg100.txt +surveyor, pg3200.txt, pg100.txt +surveys pg100.txt +surveys, pg3200.txt +survival; pg3200.txt +survive pg31100.txt, pg3200.txt, pg100.txt +survive, pg3200.txt, pg100.txt +survive--country pg3200.txt +survive. pg3200.txt +survived pg31100.txt, pg3200.txt +survived, pg3200.txt +survived--happily pg3200.txt +survived. pg3200.txt +survived: pg3200.txt +survives, pg100.txt +surviving pg31100.txt, pg3200.txt +survivor." pg31100.txt +survivors pg3200.txt +survivors, pg3200.txt +susan pg31100.txt, pg3200.txt +susan!" pg3200.txt +susan's pg31100.txt +susan's. pg31100.txt +susan, pg31100.txt, pg3200.txt +susan--" pg3200.txt +susan; pg31100.txt +susceptible pg31100.txt +susie pg3200.txt +suspect pg31100.txt, pg3200.txt, pg100.txt +suspect, pg31100.txt, pg100.txt +suspect-- pg3200.txt +suspect--but pg31100.txt +suspect--they pg3200.txt +suspect. pg3200.txt, pg100.txt +suspect? pg3200.txt +suspected pg31100.txt, pg3200.txt, pg100.txt +suspected, pg31100.txt, pg3200.txt, pg100.txt +suspected--humour. pg3200.txt +suspected. pg31100.txt, pg3200.txt +suspected." pg3200.txt +suspected; pg31100.txt, pg3200.txt +suspecting pg31100.txt, pg3200.txt +suspects pg3200.txt +suspects. pg100.txt +suspence pg31100.txt +suspend pg3200.txt, pg100.txt +suspend, pg3200.txt +suspended pg31100.txt, pg3200.txt +suspended. pg31100.txt +suspender pg3200.txt +suspender, pg3200.txt +suspense pg31100.txt, pg3200.txt +suspense!" pg3200.txt +suspense, pg31100.txt, pg3200.txt +suspense--" pg3200.txt +suspense. pg31100.txt, pg3200.txt, pg100.txt +suspension pg31100.txt +suspension. pg3200.txt +suspicion pg31100.txt, pg3200.txt, pg100.txt +suspicion! pg100.txt +suspicion!" pg3200.txt +suspicion"-- pg31100.txt +suspicion, pg31100.txt, pg3200.txt, pg100.txt +suspicion--" pg3200.txt +suspicion--(and pg3200.txt +suspicion--garibaldi pg3200.txt +suspicion. pg31100.txt, pg3200.txt, pg100.txt +suspicion." pg31100.txt +suspicion; pg3200.txt, pg100.txt +suspicion? 
pg100.txt +suspicioned pg3200.txt +suspicions pg31100.txt, pg3200.txt +suspicions, pg31100.txt, pg3200.txt +suspicions--suspicions pg3200.txt +suspicious pg3200.txt, pg100.txt +suspicious. pg31100.txt, pg3200.txt +suspicious; pg3200.txt, pg100.txt +suspicious?" pg3200.txt +suspicious?' pg3200.txt +suspiciously pg31100.txt +suspire, pg100.txt +sussex pg31100.txt +sussex, pg31100.txt +sussex. pg31100.txt +sussex." pg31100.txt +sussex?" pg31100.txt +sustain pg3200.txt, pg100.txt +sustain. pg100.txt +sustain; pg100.txt +sustained pg3200.txt +sustained. pg3200.txt +sustained; pg3200.txt +sustaining pg3200.txt +sustaining--brim pg3200.txt +sustains pg3200.txt +sustenance pg3200.txt +sustenance, pg3200.txt +sustenance. pg3200.txt +susy pg3200.txt +susy's pg3200.txt +susy, pg3200.txt +susy. pg3200.txt +susy; pg3200.txt +suthin' pg3200.txt +sutler pg100.txt +sutler. pg3200.txt +suttee pg3200.txt +suttee, pg3200.txt +swabbed pg3200.txt +swaddling pg100.txt +swag." pg3200.txt +swagg'rers. pg100.txt +swagger pg100.txt +swagger'd pg100.txt +swaggered pg3200.txt +swaggerers. pg100.txt +swaggering pg100.txt +swah." pg3200.txt +swain pg100.txt +swain). pg3200.txt +swain, pg100.txt +swain. pg100.txt +swain; pg100.txt +swain? pg100.txt +swaller pg3200.txt +swallow pg3200.txt +swallow'd pg100.txt +swallow'd. pg100.txt +swallow's pg3200.txt +swallow, pg3200.txt +swallow-tail pg3200.txt +swallow-tail. pg3200.txt +swallow-tailed, pg3200.txt +swallow-tails. pg3200.txt +swallowed pg31100.txt, pg3200.txt +swallowed: pg3200.txt +swallowing pg3200.txt +swallowing, pg3200.txt +swallows pg3200.txt, pg100.txt +swam pg3200.txt +swam, pg100.txt +swami pg3200.txt +swamp, pg3200.txt +swamp. pg3200.txt +swan pg100.txt +swan, pg3200.txt, pg100.txt +swans, pg100.txt +swansea, pg3200.txt +swap pg3200.txt +sward pg3200.txt +swarm pg3200.txt, pg100.txt +swarmed pg3200.txt +swarming pg3200.txt +swarms pg3200.txt +swarthy pg3200.txt +swarthy, pg3200.txt +swashbuckler, pg3200.txt +swashers. pg100.txt +swath. pg100.txt +swathed pg3200.txt +sway pg100.txt +sway'd pg100.txt +sway'd, pg100.txt +sway'st pg100.txt +sway, pg100.txt +sway. pg3200.txt, pg100.txt +swaying pg3200.txt +sways, pg3200.txt, pg100.txt +swear pg3200.txt, pg100.txt +swear! pg100.txt +swear!" pg3200.txt +swear'st, pg100.txt +swear't. pg100.txt +swear, pg100.txt +swear- pg100.txt +swear. pg3200.txt, pg100.txt +swear." pg3200.txt +swear.] pg100.txt +swear; pg100.txt +swear? pg100.txt +swearers pg3200.txt +swearing pg3200.txt, pg100.txt +swearing! pg100.txt +swearing, pg3200.txt, pg100.txt +swearing. pg3200.txt, pg100.txt +swearings, pg100.txt +swears pg31100.txt, pg3200.txt, pg100.txt +swears, pg100.txt +swears. pg3200.txt, pg100.txt +sweat pg31100.txt, pg3200.txt, pg100.txt +sweat, pg3200.txt, pg100.txt +sweat--" pg3200.txt +sweat. pg3200.txt, pg100.txt +sweat; pg100.txt +sweat? pg100.txt +sweated pg3200.txt +sweaten pg100.txt +sweateth pg3200.txt +sweating pg3200.txt, pg100.txt +sweats, pg100.txt +sweden pg3200.txt +swedenborg, pg3200.txt +swedenborgian, pg3200.txt +swedenborgians, pg3200.txt +sweep pg3200.txt +sweep. pg31100.txt, pg3200.txt +sweeping pg31100.txt, pg3200.txt +sweeping..... pg3200.txt +sweeping; pg3200.txt +sweeps pg3200.txt +sweeps!" pg3200.txt +sweet pg31100.txt, pg3200.txt, pg100.txt +sweet! pg3200.txt +sweet, pg3200.txt, pg100.txt +sweet- pg3200.txt, pg100.txt +sweet-cake pg31100.txt +sweet. pg3200.txt, pg100.txt +sweet; pg3200.txt, pg100.txt +sweet? 
pg100.txt +sweeten pg100.txt +sweetened pg3200.txt +sweeter pg31100.txt, pg3200.txt, pg100.txt +sweetest pg31100.txt, pg3200.txt +sweetheart pg3200.txt +sweetheart" pg3200.txt +sweetheart's pg100.txt +sweetheart, pg3200.txt +sweetheart. pg3200.txt, pg100.txt +sweetheart? pg100.txt +sweetheart?" pg3200.txt +sweethearts, pg3200.txt +sweethearts. pg3200.txt +sweeting; pg100.txt +sweetly pg3200.txt, pg100.txt +sweetly, pg3200.txt +sweetly--this pg3200.txt +sweetly. pg100.txt +sweetmeats pg3200.txt +sweetness pg31100.txt, pg3200.txt +sweetness! pg100.txt +sweetness!--latterly pg31100.txt +sweetness, pg31100.txt, pg3200.txt, pg100.txt +sweetness: pg3200.txt +sweets pg3200.txt, pg100.txt +sweets! pg100.txt +sweets, pg100.txt +sweets. pg100.txt +sweets: pg100.txt +swell pg31100.txt, pg3200.txt, pg100.txt +swell, pg3200.txt +swell. pg3200.txt +swelled pg31100.txt, pg3200.txt +swelled, pg3200.txt +swelling pg31100.txt, pg3200.txt +swelling, pg3200.txt +swells pg3200.txt, pg100.txt +swells, pg100.txt +swells. pg3200.txt +swelter pg3200.txt +sweltering pg3200.txt +swept pg31100.txt, pg3200.txt +swerve; pg100.txt +swerving. pg100.txt +swift pg3200.txt, pg100.txt +swift, pg3200.txt, pg100.txt +swift-footed pg100.txt +swift. pg3200.txt +swift; pg100.txt +swifter, pg3200.txt +swiftly pg3200.txt, pg100.txt +swiftly, pg100.txt +swiftly. pg31100.txt +swiftly." pg3200.txt +swiftness pg100.txt +swiftness. pg3200.txt +swim pg3200.txt, pg100.txt +swim! pg100.txt +swim. pg3200.txt +swim." pg3200.txt +swim; pg3200.txt +swimmer, pg100.txt +swimmers pg3200.txt +swimming pg3200.txt +swimming! pg3200.txt +swimming, pg3200.txt +swimming-hole?" pg3200.txt +swims pg3200.txt +swims, pg3200.txt +swims. pg100.txt +swindle pg3200.txt +swindle!" pg3200.txt +swindle. pg3200.txt +swindle?" pg3200.txt +swindled. pg3200.txt +swindler pg3200.txt +swindler, pg3200.txt +swindlers pg3200.txt +swindlers! pg3200.txt +swindlers, pg3200.txt +swindles, pg3200.txt +swine pg3200.txt, pg100.txt +swine!" pg3200.txt +swine-drunk; pg100.txt +swine-herds--" pg3200.txt +swine. pg3200.txt, pg100.txt +swineherd?" pg3200.txt +swing pg3200.txt +swing. pg3200.txt +swinging pg31100.txt, pg3200.txt +swings pg3200.txt +swings. pg3200.txt +swish pg3200.txt +swished pg3200.txt +swiss pg3200.txt +swisserland. pg31100.txt +switch pg3200.txt +switch." pg3200.txt +switch; pg31100.txt +switches. pg3200.txt +switchman pg3200.txt +switzerland pg3200.txt +switzerland, pg31100.txt, pg3200.txt +switzerland--yes, pg3200.txt +switzerland. pg3200.txt +switzerland: pg3200.txt +switzerland? pg3200.txt +swollen pg3200.txt +swoon pg3200.txt, pg100.txt +swoon, pg3200.txt +swoon. pg31100.txt, pg3200.txt +swoon? pg100.txt +swooned pg3200.txt +swoons.] pg100.txt +swoons; pg100.txt +swoons] pg100.txt +swoons]. pg100.txt +swoop. pg3200.txt +swoop? pg100.txt +swooped pg3200.txt +swor'st pg100.txt +sword pg3200.txt, pg100.txt +sword! pg100.txt +sword, pg3200.txt, pg100.txt +sword- pg100.txt +sword-cuts; pg3200.txt +sword-men. pg100.txt +sword-play pg3200.txt +sword. pg3200.txt, pg100.txt +sword." pg3200.txt +sword.] pg100.txt +sword; pg3200.txt, pg100.txt +sword? pg100.txt +sword?" pg3200.txt +sword] pg100.txt +swords pg3200.txt, pg100.txt +swords, pg3200.txt, pg100.txt +swords. pg3200.txt, pg100.txt +swords.] pg100.txt +swords; pg3200.txt, pg100.txt +swords? pg100.txt +swords] pg100.txt +swordsman, pg3200.txt +swore pg3200.txt, pg100.txt +swore, pg100.txt +swore. pg100.txt +sworn pg31100.txt, pg3200.txt, pg100.txt +sworn! pg100.txt +sworn't. 
pg100.txt +sworn, pg100.txt +sworn- pg100.txt +sworn. pg3200.txt, pg100.txt +sworn; pg100.txt +swosh-swosh, pg3200.txt +swung pg3200.txt +syca-i pg3200.txt +sycamore pg100.txt +sycamore, pg3200.txt +sycamore- pg3200.txt +sycamores, pg3200.txt +sycamores. pg3200.txt +sycamores." pg3200.txt +sycamores?" pg3200.txt +sycophancy pg3200.txt +sycorax pg100.txt +sycorax, pg100.txt +sydney pg3200.txt +sydney, pg3200.txt +sydney--spring-time pg3200.txt +sydney. pg3200.txt +sydney." pg3200.txt +syllable pg31100.txt, pg3200.txt +syllable, pg31100.txt, pg3200.txt +syllable. pg3200.txt, pg100.txt +syllable: pg100.txt +syllables pg3200.txt, pg100.txt +syllables, pg3200.txt +sylph-like pg3200.txt +sylva. pg3200.txt +symbol pg3200.txt +symbol. pg3200.txt +symbolical pg3200.txt +symbolizes. pg3200.txt +symbols pg3200.txt +symmetrical pg3200.txt +symmetrically, pg3200.txt +symmetry pg31100.txt, pg3200.txt +symmetry, pg3200.txt +symmetry." pg3200.txt +sympathetic pg31100.txt, pg3200.txt +sympathetic. pg31100.txt +sympathies pg3200.txt +sympathies. pg3200.txt +sympathies; pg3200.txt +sympathise, pg100.txt +sympathiser. pg31100.txt +sympathize pg3200.txt, pg100.txt +sympathize, pg100.txt +sympathized pg3200.txt +sympathized, pg100.txt +sympathizers pg3200.txt +sympathizers, pg3200.txt +sympathizingly. pg3200.txt +sympathy pg31100.txt, pg3200.txt +sympathy! pg100.txt +sympathy, pg31100.txt, pg3200.txt, pg100.txt +sympathy--he pg3200.txt +sympathy. pg31100.txt, pg3200.txt, pg100.txt +sympathy." pg3200.txt +symphonious pg3200.txt +symptom pg31100.txt +symptom. pg31100.txt +symptoms pg31100.txt, pg3200.txt +symptoms. pg31100.txt +synagogue pg3200.txt +syndicate pg3200.txt +syndicate, pg3200.txt +syndicate. pg3200.txt +synod pg100.txt +synonymous pg3200.txt +synonymous. pg3200.txt +synonymous." pg31100.txt +synopses pg3200.txt +synopsis, pg3200.txt +synopsis. pg3200.txt +synopsize. pg3200.txt +syraccus, pg3200.txt +syracuse pg100.txt +syracuse. pg100.txt +syria pg3200.txt, pg100.txt +syria, pg3200.txt +syria--and pg3200.txt +syria. pg3200.txt +syrian pg3200.txt +syrian, pg3200.txt +syrup--could pg3200.txt +syrup. pg3200.txt +system pg31100.txt, pg3200.txt +system, pg31100.txt, pg3200.txt +system-- pg3200.txt +system. pg3200.txt +system: pg3200.txt +system?" pg3200.txt +systematically, pg3200.txt +systems pg3200.txt +systems, pg3200.txt +systems. pg3200.txt +szczepanik pg3200.txt +szczepanik. pg3200.txt +szczepanik.' pg3200.txt +t' pg3200.txt +t'll--" pg3200.txt +t'other pg3200.txt, pg100.txt +t'other, pg3200.txt +t'other,--nothing pg3200.txt +t'other. pg100.txt +t's, pg3200.txt +t----l. pg3200.txt +t.--1860--x"; pg3200.txt +t.--1860--x." pg3200.txt +t.:] pg3200.txt +t.] pg3200.txt +t.]] pg3200.txt +t? pg100.txt +ta'en pg100.txt +ta'en, pg100.txt +ta'en. pg100.txt +ta'en; pg100.txt +ta'en? pg100.txt +tabard pg3200.txt +tabard. pg3200.txt +tabard: pg3200.txt +taber. pg100.txt +table pg31100.txt, pg3200.txt, pg100.txt +table! pg3200.txt, pg100.txt +table) pg31100.txt +table): pg3200.txt +table, pg31100.txt, pg3200.txt, pg100.txt +table--a pg3200.txt +table--but pg3200.txt +table--contractions pg31100.txt +table--i pg3200.txt +table--the pg3200.txt +table-book] pg100.txt +table-cloth pg3200.txt +table-fare pg3200.txt +table-room pg3200.txt +table-talk; pg100.txt +table-topped pg3200.txt +table. pg31100.txt, pg3200.txt, pg100.txt +table." pg31100.txt, pg3200.txt +table: pg31100.txt, pg3200.txt +table; pg31100.txt, pg100.txt +table?" pg3200.txt +table?' pg3200.txt +tableau.] 
pg3200.txt +tables pg3200.txt +tables, pg31100.txt, pg3200.txt +tables. pg3200.txt, pg100.txt +tables: pg3200.txt +tablespoonful pg3200.txt +tablets pg31100.txt +tabooed, pg3200.txt +tabor pg3200.txt, pg100.txt +tabor, pg3200.txt, pg100.txt +tabor? pg100.txt +tabourines, pg100.txt +tabu'd pg3200.txt +tabu. pg3200.txt +tabulates pg3200.txt +tacit pg3200.txt +tacitly pg3200.txt +tacitly, pg3200.txt +taciturnity. pg100.txt +tack, pg3200.txt +tack." pg3200.txt +tackle pg3200.txt, pg100.txt +tackle. pg3200.txt +tackled pg3200.txt +tacklings? pg100.txt +tacks pg3200.txt +tact pg3200.txt +tact, pg3200.txt +tact. pg3200.txt +tactics pg3200.txt +tactics, pg3200.txt +tadpole pg3200.txt +tadpole--until pg3200.txt +taffeta. pg100.txt +taffy. pg3200.txt +tag pg3200.txt +tag." pg3200.txt +tagged pg3200.txt +tags pg3200.txt +tags, pg3200.txt +tah, pg100.txt +tahlequah." pg3200.txt +tahlequah?" pg3200.txt +tahoe pg3200.txt +tahoe. pg3200.txt +tail pg3200.txt, pg100.txt +tail! pg100.txt +tail!" pg3200.txt +tail, pg3200.txt, pg100.txt +tail. pg3200.txt, pg100.txt +tail; pg100.txt +taile, pg3200.txt +tailings, pg3200.txt +tailor pg100.txt +tailor's pg3200.txt +tailor, pg3200.txt, pg100.txt +tailor. pg3200.txt, pg100.txt +tailor; pg100.txt +tailor? pg100.txt +tailors pg100.txt +tailors, pg3200.txt, pg100.txt +tailors. pg100.txt +tails pg3200.txt +tails. pg3200.txt +tails." pg3200.txt +taint pg3200.txt, pg100.txt +taint. pg100.txt +tainted pg3200.txt, pg100.txt +tainted." pg31100.txt +tainted; pg100.txt +taints pg100.txt +taj pg3200.txt +taj's pg3200.txt +taj." pg3200.txt +tak'st pg100.txt +take! pg3200.txt +take't pg100.txt +take't, pg100.txt +take't. pg100.txt +take, pg3200.txt, pg100.txt +take--the pg3200.txt +take--what pg3200.txt +take. pg3200.txt, pg100.txt +take." pg3200.txt +take; pg100.txt +take? pg100.txt +take?" pg3200.txt +takee pg3200.txt +taken pg31100.txt, pg3200.txt, pg100.txt +taken! pg3200.txt +taken!" pg3200.txt +taken, pg31100.txt, pg3200.txt, pg100.txt +taken- pg100.txt +taken--it pg3200.txt +taken. pg31100.txt, pg3200.txt, pg100.txt +taken." pg31100.txt, pg3200.txt +taken? pg100.txt +takes pg31100.txt, pg3200.txt, pg100.txt +takes, pg100.txt +takes. pg100.txt +takes: pg3200.txt, pg100.txt +takes; pg3200.txt, pg100.txt +takest, pg100.txt +taking pg31100.txt, pg3200.txt, pg100.txt +taking-off, pg100.txt +taking. pg3200.txt +takings. pg3200.txt +talbot pg100.txt +talbot!'] pg100.txt +talbot's pg100.txt +talbot, pg3200.txt, pg100.txt +talbot. pg100.txt +talbot? pg100.txt +talc pg3200.txt +tale pg3200.txt, pg100.txt +tale, pg31100.txt, pg3200.txt, pg100.txt +tale--"the pg3200.txt +tale. pg31100.txt, pg3200.txt, pg100.txt +tale." pg3200.txt +tale; pg3200.txt, pg100.txt +tale?" pg3200.txt +talent pg31100.txt, pg3200.txt +talent! pg3200.txt, pg100.txt +talent, pg31100.txt, pg3200.txt +talent. pg31100.txt, pg3200.txt, pg100.txt +talent." pg31100.txt +talents pg31100.txt, pg3200.txt +talents, pg3200.txt, pg100.txt +talents. pg3200.txt, pg100.txt +taleporter, pg100.txt +tales pg3200.txt, pg100.txt +tales, pg31100.txt, pg3200.txt, pg100.txt +tales. pg100.txt +talisman. pg3200.txt +talk pg31100.txt, pg3200.txt, pg100.txt +talk! pg31100.txt, pg3200.txt, pg100.txt +talk'd pg100.txt +talk'd, pg100.txt +talk'd; pg100.txt +talk'st! pg100.txt +talk, pg31100.txt, pg3200.txt, pg100.txt +talk--all pg3200.txt +talk--i pg3200.txt +talk--most pg3200.txt +talk-machinery pg3200.txt +talk-talk-talking. pg3200.txt +talk. pg31100.txt, pg3200.txt, pg100.txt +talk." 
pg31100.txt, pg3200.txt +talk; pg3200.txt, pg100.txt +talk? pg3200.txt, pg100.txt +talk?" pg3200.txt +talkative, pg31100.txt +talkative. pg31100.txt +talkative." pg31100.txt +talked pg31100.txt, pg3200.txt +talked, pg3200.txt +talked--subject, pg3200.txt +talked. pg3200.txt +talked." pg3200.txt +talked; pg3200.txt +talked;--but pg3200.txt +talked? pg3200.txt +talker!--and pg31100.txt +talker"--that pg3200.txt +talker, pg3200.txt +talker. pg3200.txt, pg100.txt +talker; pg31100.txt +talkers pg3200.txt +talkers, pg3200.txt +talkin' pg3200.txt +talking pg31100.txt, pg3200.txt, pg100.txt +talking, pg31100.txt, pg3200.txt +talking. pg31100.txt, pg3200.txt, pg100.txt +talking." pg31100.txt, pg3200.txt +talking; pg3200.txt +talking?" pg31100.txt, pg3200.txt +talks pg31100.txt, pg3200.txt, pg100.txt +talks, pg3200.txt +talks. pg3200.txt +talks?" pg3200.txt +talky, pg3200.txt +talkyng---- pg3200.txt +tall pg31100.txt, pg3200.txt, pg100.txt +tall!" pg31100.txt, pg3200.txt +tall!--oh, pg31100.txt +tall, pg31100.txt, pg3200.txt, pg100.txt +tall. pg100.txt +tall." pg3200.txt +tall; pg31100.txt, pg100.txt +taller pg31100.txt +taller." pg31100.txt +tallest. pg100.txt +tallest." pg31100.txt +tallied pg3200.txt +tallow pg3200.txt +tallow--served pg3200.txt +tallow-catch- pg100.txt +tallow-face! pg100.txt +tallow. pg100.txt +tallow? pg100.txt +tally pg3200.txt +tally?" pg3200.txt +talmage, pg3200.txt +talmage?" pg3200.txt +talmud pg3200.txt +talon, pg3200.txt +talon- pg3200.txt +talons; pg100.txt +talons? pg100.txt +talunga pg3200.txt +tam'd pg100.txt +tamarinds pg3200.txt +tamarinds--but pg3200.txt +tambour pg31100.txt +tame pg31100.txt, pg3200.txt +tame, pg3200.txt, pg100.txt +tame. pg100.txt +tame; pg100.txt +tamed pg3200.txt +tamely, pg100.txt +tameness pg31100.txt +tamerlane, pg3200.txt +tames pg3200.txt +taming pg3200.txt +taming-school. pg100.txt +tammany pg3200.txt +tammany, pg3200.txt +tammany. pg3200.txt +tammany; pg3200.txt +tamora pg100.txt +tamora, pg100.txt +tamora. pg100.txt +tamora: pg100.txt +tamper pg3200.txt +tamworth pg100.txt +tan-colored pg3200.txt +tan." pg3200.txt +tang, pg100.txt +tangible, pg3200.txt +tangier pg3200.txt +tangier, pg3200.txt +tangier. pg3200.txt +tangle pg3200.txt +tangled pg3200.txt +tanglesome pg3200.txt +tank pg3200.txt +tank, pg3200.txt +tank. pg3200.txt +tanlings pg100.txt +tanner pg3200.txt, pg100.txt +tantalizing pg3200.txt +tanyard pg3200.txt +tanyard. pg3200.txt +tap pg31100.txt, pg3200.txt, pg100.txt +tap), pg3200.txt +tap-room pg3200.txt +tap; pg100.txt +tape, pg100.txt +taper pg100.txt +taper-light pg100.txt +taper. pg100.txt +tapered pg3200.txt +tapering pg3200.txt +tapers pg3200.txt, pg100.txt +tapers, pg100.txt +tapers. pg100.txt +tapestries pg3200.txt +tapestry pg100.txt +tapestry. pg3200.txt +tapestry; pg100.txt +tapestry?" pg31100.txt +tapped pg3200.txt +tapping pg3200.txt +taps pg3200.txt +tapster. pg100.txt +tapster? pg100.txt +tapsters, pg100.txt +taquiner), pg3200.txt +tar, pg3200.txt +tar. pg3200.txt +taranaki pg3200.txt +tarantulas pg3200.txt +tarantulas. pg3200.txt +tardied pg100.txt +tardily pg100.txt +tardy, pg100.txt +target pg3200.txt +target!'" pg3200.txt +target, pg3200.txt +target." pg3200.txt +tarifa, pg3200.txt +tariff; pg3200.txt +tarnish pg3200.txt +tarnished pg3200.txt +tarnishing pg3200.txt +taroom pg3200.txt +taroom, pg3200.txt +tarpeian, pg100.txt +tarquins. pg100.txt +tarred pg3200.txt +tarried pg3200.txt +tarried. pg100.txt +tarried? pg100.txt +tarry pg3200.txt, pg100.txt +tarry, pg3200.txt +tarry. 
pg3200.txt, pg100.txt +tarry; pg100.txt +tarrytown pg3200.txt +tarsus, pg3200.txt +tart pg31100.txt +tart-- pg3200.txt +tartness. pg100.txt +task pg31100.txt, pg3200.txt, pg100.txt +task'd, pg100.txt +task, pg31100.txt, pg3200.txt, pg100.txt +task. pg3200.txt, pg100.txt +task? pg100.txt +tasks pg3200.txt +tasks. pg100.txt +tasmania, pg3200.txt +tassait pg3200.txt +tassels pg3200.txt +tassels, pg3200.txt +taste pg31100.txt, pg3200.txt, pg100.txt +taste! pg100.txt +taste!" pg31100.txt +taste, pg31100.txt, pg3200.txt, pg100.txt +taste--he pg31100.txt +taste. pg31100.txt, pg3200.txt, pg100.txt +taste." pg31100.txt, pg3200.txt +taste: pg31100.txt, pg100.txt +taste; pg100.txt +taste? pg100.txt +tasted pg31100.txt, pg3200.txt +tasted, pg100.txt +tasteful pg3200.txt +tastefulness. pg3200.txt +tasteless pg3200.txt +tasteless, pg3200.txt +taster, pg3200.txt +tasters pg3200.txt +tastes pg3200.txt +tastes. pg3200.txt +tastes." pg3200.txt +tasting pg31100.txt, pg100.txt +tattered pg3200.txt +tatters pg3200.txt +tattling. pg100.txt +tattoo-marks. pg3200.txt +tattooing, pg3200.txt +taub--blind--todt! pg3200.txt +taught pg31100.txt, pg3200.txt, pg100.txt +taught, pg3200.txt +taught,' pg3200.txt +taught. pg31100.txt, pg3200.txt, pg100.txt +taugwalder pg3200.txt +taugwalders, pg3200.txt +taunt pg3200.txt +taunts pg100.txt +taunts! pg100.txt +taunts, pg100.txt +tauranga pg3200.txt +taurus! pg100.txt +taurus. pg100.txt +taurus? pg100.txt +tautology pg31100.txt, pg3200.txt +tavern pg3200.txt, pg100.txt +tavern" pg3200.txt +tavern, pg100.txt +tavern. pg3200.txt, pg100.txt +tavern? pg100.txt +taverns pg3200.txt +taverns, pg3200.txt, pg100.txt +taverns." pg31100.txt +tavernward pg3200.txt +taw." pg3200.txt +tawdry pg3200.txt +tawny. pg100.txt +tax pg31100.txt, pg3200.txt, pg100.txt +tax! pg3200.txt +tax, pg3200.txt +tax-payers? pg3200.txt +tax. pg3200.txt +tax; pg3200.txt +tax? pg3200.txt +taxation pg3200.txt +taxation! pg100.txt +taxation, pg3200.txt +taxation? pg100.txt +taxations, pg100.txt +taxed pg3200.txt +taxed, pg3200.txt +taxed. pg3200.txt +taxed." pg3200.txt +taxes pg3200.txt +taxes, pg3200.txt +taxes. pg3200.txt +taxes: pg3200.txt +taxes; pg100.txt +taxing pg3200.txt +taylor pg31100.txt, pg3200.txt +taylor's pg31100.txt +taylor, pg31100.txt, pg3200.txt +taylor,92 pg3200.txt +taylor--that pg31100.txt +taylor." pg31100.txt +tea pg31100.txt, pg3200.txt +tea, pg31100.txt, pg3200.txt +tea-group pg3200.txt +tea-table, pg31100.txt +tea-things, pg31100.txt +tea. pg31100.txt, pg3200.txt +tea." pg31100.txt, pg3200.txt +tea; pg31100.txt +tea?" pg3200.txt +teach pg31100.txt, pg3200.txt, pg100.txt +teach, pg31100.txt, pg3200.txt, pg100.txt +teach--unrebuked. pg3200.txt +teach. pg3200.txt, pg100.txt +teach: pg100.txt +teach; pg100.txt +teacher pg31100.txt, pg3200.txt +teacher! pg3200.txt +teacher's pg3200.txt +teacher, pg3200.txt +teacher. pg3200.txt +teacher; pg3200.txt +teachers pg31100.txt, pg3200.txt, pg100.txt +teachers, pg3200.txt +teachers. pg3200.txt +teachers." pg3200.txt +teachers; pg3200.txt +teaches pg3200.txt +teaching pg3200.txt, pg100.txt +teaching, pg3200.txt +teaching. pg100.txt +teachings pg3200.txt +teachings, pg3200.txt +teachings; pg3200.txt +teacup pg3200.txt +team pg3200.txt, pg100.txt +team, pg3200.txt +team. pg3200.txt +teaming. pg3200.txt +teams pg3200.txt +teamsters pg3200.txt +teamsters, pg3200.txt +teapot--and pg3200.txt +tear pg31100.txt, pg3200.txt, pg100.txt +tear! pg31100.txt, pg100.txt +tear!" pg3200.txt +tear, pg3200.txt, pg100.txt +tear-jug. pg3200.txt +tear-jug] pg3200.txt +tear. 
pg3200.txt, pg100.txt +tear: pg100.txt +tear; pg3200.txt, pg100.txt +tearful, pg3200.txt +tearing pg31100.txt, pg3200.txt, pg100.txt +tears pg31100.txt, pg3200.txt, pg100.txt +tears, pg3200.txt, pg100.txt +tears- pg100.txt +tears. pg31100.txt, pg3200.txt, pg100.txt +tears." pg3200.txt +tears.- pg100.txt +tears: pg100.txt +tears; pg31100.txt, pg3200.txt, pg100.txt +tears? pg3200.txt, pg100.txt +tearsheet pg100.txt +tearsheet! pg100.txt +tearsheet. pg100.txt +tease!" pg3200.txt +tease, pg31100.txt +teased pg31100.txt +teasing pg31100.txt +teasing?" pg31100.txt +teaspoon pg3200.txt +teat. pg100.txt +teatro pg3200.txt +teawamute pg3200.txt +technical pg3200.txt +technicalities pg3200.txt +technicalities. pg3200.txt +technically pg3200.txt +technique, pg3200.txt +technique?" pg3200.txt +tecumseh?" pg3200.txt +tedious pg3200.txt, pg100.txt +tedious, pg3200.txt, pg100.txt +tedious. pg100.txt +tedious; pg100.txt +tediously pg3200.txt +tediousness. pg100.txt +tedium pg3200.txt +teem, pg100.txt +teemed pg3200.txt +teeming pg3200.txt +teems pg100.txt +teen! pg100.txt +teen, pg100.txt +teen. pg100.txt +teeth pg31100.txt, pg3200.txt, pg100.txt +teeth!' pg100.txt +teeth!--and pg3200.txt +teeth, pg31100.txt, pg3200.txt, pg100.txt +teeth- pg100.txt +teeth--a pg3200.txt +teeth--and pg3200.txt +teeth--wore pg3200.txt +teeth. pg3200.txt, pg100.txt +teeth.' pg100.txt +teeth: pg3200.txt +teeth; pg3200.txt +teeth? pg100.txt +teeth?' pg3200.txt +teething pg3200.txt +teething, pg3200.txt +teething--think pg3200.txt +teething. pg3200.txt +teetotaler pg3200.txt +teetotaler!" pg3200.txt +teetotaler, pg3200.txt +teetotalers pg3200.txt +teipsum; pg100.txt +telamonius, pg100.txt +telegram pg3200.txt +telegram, pg3200.txt +telegram-- pg3200.txt +telegram--and pg3200.txt +telegram. pg3200.txt +telegram: pg3200.txt +telegram?" pg3200.txt +telegrams pg3200.txt +telegrams. pg3200.txt +telegrams: pg3200.txt +telegrams?] pg3200.txt +telegraph pg3200.txt +telegraph, pg3200.txt +telegraph--anything pg3200.txt +telegraph-blank. pg3200.txt +telegraph-pole pg3200.txt +telegraph." pg3200.txt +telegraph; pg3200.txt +telegraph?" pg3200.txt +telegraphed pg3200.txt +telegraphic pg3200.txt +telegraphically. pg3200.txt +telegraphing pg3200.txt +telegraphs pg3200.txt +telegraphs. pg3200.txt +telegraphy pg3200.txt +telegraphy, pg3200.txt +telelectroscope pg3200.txt +telephone pg3200.txt +telephone, pg3200.txt +telephone. pg3200.txt +telephone: pg3200.txt +telephoned pg3200.txt +telescope pg3200.txt +telescope, pg3200.txt +telescope. pg3200.txt +telescope; pg3200.txt +telescope] pg3200.txt +telescopes pg3200.txt +telescopes, pg3200.txt +telescopes; pg3200.txt +telescopulariat--i pg3200.txt +tell pg31100.txt, pg3200.txt, pg100.txt +tell! pg100.txt +tell!" pg31100.txt, pg3200.txt +tell, pg3200.txt, pg100.txt +tell- pg100.txt +tell--for pg3200.txt +tell-tale pg3200.txt +tell. pg31100.txt, pg3200.txt, pg100.txt +tell." pg31100.txt, pg3200.txt +tell.' pg100.txt +tell: pg100.txt +tell; pg31100.txt, pg3200.txt, pg100.txt +tell? pg100.txt +tell?" pg31100.txt, pg3200.txt +teller pg3200.txt +teller. pg100.txt +telling pg31100.txt, pg3200.txt, pg100.txt +telling, pg3200.txt +telling. pg3200.txt, pg100.txt +telling; pg3200.txt +tells pg31100.txt, pg3200.txt, pg100.txt +telltale pg3200.txt +tellus, pg100.txt +temerity. pg3200.txt +temper pg31100.txt, pg3200.txt, pg100.txt +temper'd pg100.txt +temper'd, pg100.txt +temper'd. pg100.txt +temper, pg31100.txt, pg3200.txt, pg100.txt +temper- pg100.txt +temper-- pg3200.txt +temper. 
pg31100.txt, pg3200.txt, pg100.txt +temper." pg31100.txt, pg3200.txt +temper; pg31100.txt, pg3200.txt, pg100.txt +temper? pg3200.txt +temper?" pg31100.txt +temperament pg3200.txt +temperament, pg3200.txt +temperament. pg3200.txt +temperament; pg3200.txt +temperaments pg3200.txt +temperaments. pg3200.txt +temperance pg3200.txt +temperance, pg3200.txt +temperance. pg3200.txt, pg100.txt +temperate, pg100.txt +temperate. pg100.txt +temperate: pg100.txt +temperature pg3200.txt +temperature. pg3200.txt +tempered pg31100.txt, pg3200.txt +tempered, pg31100.txt +tempers pg31100.txt, pg3200.txt +tempers, pg3200.txt +tempers. pg31100.txt +tempest pg31100.txt, pg3200.txt, pg100.txt +tempest": pg3200.txt +tempest, pg3200.txt, pg100.txt +tempest-toss'd. pg100.txt +tempest. pg3200.txt, pg100.txt +tempest: pg3200.txt +tempests pg100.txt +temple pg3200.txt, pg100.txt +temple, pg3200.txt, pg100.txt +temple--and pg3200.txt +temple--surfeited pg3200.txt +temple. pg3200.txt, pg100.txt +temple; pg3200.txt, pg100.txt +temple? pg3200.txt +temples pg3200.txt +temples, pg3200.txt +temples. pg3200.txt, pg100.txt +temples: pg3200.txt +temporal, pg100.txt +temporal. pg100.txt +temporally pg31100.txt +temporarily! pg3200.txt +temporarily, pg3200.txt +temporarily; pg3200.txt +temporary pg31100.txt, pg3200.txt +temporary; pg3200.txt +temporiz'd. pg100.txt +temporize, pg3200.txt +temporizer pg100.txt +temporizing pg31100.txt +temps pg3200.txt +temps. pg100.txt +tempt pg31100.txt, pg3200.txt +tempt; pg100.txt +temptation pg31100.txt, pg3200.txt, pg100.txt +temptation, pg3200.txt +temptation--advice pg3200.txt +temptation. pg31100.txt, pg3200.txt +temptation." pg3200.txt +temptations pg31100.txt +tempted pg31100.txt, pg3200.txt +tempted, pg31100.txt, pg3200.txt +tempted. pg100.txt +tempter pg3200.txt +tempting pg31100.txt +tempting, pg3200.txt +tempting. pg3200.txt +tempts pg100.txt +ten pg31100.txt, pg3200.txt, pg100.txt +ten! pg3200.txt, pg100.txt +ten, pg31100.txt, pg3200.txt, pg100.txt +ten-----" pg3200.txt +ten-pins pg3200.txt +ten-story pg3200.txt +ten. pg31100.txt, pg3200.txt, pg100.txt +ten." pg3200.txt +ten.' pg100.txt +ten; pg3200.txt +ten? pg3200.txt +tenacious, pg3200.txt +tenacity pg3200.txt +tenacity. pg3200.txt +tenant pg31100.txt +tenant, pg100.txt +tenant. pg31100.txt +tenant." pg31100.txt +tenant?" pg31100.txt +tenantless, pg100.txt +tenants, pg31100.txt +tenants. pg3200.txt, pg100.txt +tench. pg100.txt +tend pg100.txt +tend, pg100.txt +tend. pg100.txt +tend; pg100.txt +tend? pg100.txt +tend?" pg31100.txt +tendance pg100.txt +tendance, pg100.txt +tendency pg3200.txt +tendency, pg31100.txt +tendency. pg31100.txt +tender pg31100.txt, pg3200.txt, pg100.txt +tender'd pg100.txt +tender'd; pg100.txt +tender, pg31100.txt, pg100.txt +tender-footed pg3200.txt +tender-minded pg100.txt +tender-smelling pg100.txt +tender. pg31100.txt, pg3200.txt, pg100.txt +tender; pg100.txt +tendered pg31100.txt, pg3200.txt, pg100.txt +tenderer pg3200.txt +tenderer. pg3200.txt +tenderly pg31100.txt, pg3200.txt +tenderness pg31100.txt, pg3200.txt, pg100.txt +tenderness! pg31100.txt +tenderness, pg31100.txt, pg3200.txt, pg100.txt +tenderness. pg3200.txt, pg100.txt +tendernesses pg3200.txt +tenders pg3200.txt, pg100.txt +tending pg31100.txt, pg3200.txt +tending; pg100.txt +tendrils pg3200.txt +tends pg3200.txt +tends, pg100.txt +tends- pg100.txt +tenements pg31100.txt +tenements, pg100.txt +tenets. pg3200.txt +tenfold pg100.txt +tenfold, pg3200.txt +tenir pg3200.txt +tenn. 
pg3200.txt +tennessee pg3200.txt +tennessee, pg3200.txt +tennessee. pg3200.txt +tennessee.' pg3200.txt +tennesseean pg3200.txt +tennysons, pg3200.txt +tenor pg3200.txt +tenor, pg3200.txt +tenor. pg3200.txt +tenour pg31100.txt, pg100.txt +tenour, pg100.txt +tenour. pg100.txt +tenpins, pg3200.txt +tense pg3200.txt +tent pg3200.txt, pg100.txt +tent, pg3200.txt, pg100.txt +tent- pg100.txt +tent-pole, pg3200.txt +tent. pg3200.txt, pg100.txt +tent: pg100.txt +tent; pg100.txt +tent? pg100.txt +tent] pg100.txt +tentatively: pg3200.txt +tenth pg3200.txt +tenth, pg100.txt +tenth. pg3200.txt, pg100.txt +tents pg31100.txt, pg3200.txt, pg100.txt +tents, pg3200.txt, pg100.txt +tents. pg100.txt +tents; pg3200.txt, pg100.txt +tenure. pg100.txt +tenures, pg100.txt +term pg3200.txt, pg100.txt +term, pg3200.txt, pg100.txt +term. pg3200.txt +term." pg3200.txt +term: pg31100.txt, pg3200.txt +term; pg100.txt +termed pg31100.txt, pg3200.txt +termes, pg3200.txt +terminate. pg3200.txt +terminated, pg31100.txt +termination pg3200.txt +termination. pg31100.txt +terminations, pg100.txt +terminology pg3200.txt +terminology. pg3200.txt +terms pg31100.txt, pg3200.txt, pg100.txt +terms, pg31100.txt, pg3200.txt, pg100.txt +terms-- pg3200.txt +terms--we pg3200.txt +terms. pg31100.txt, pg3200.txt, pg100.txt +terms." pg31100.txt, pg3200.txt +terms: pg3200.txt +terms; pg3200.txt, pg100.txt +terms? pg100.txt +terms?" pg3200.txt +terrace pg100.txt +terraced pg3200.txt +terraces pg3200.txt +terraces, pg3200.txt +terre'. pg3200.txt +terre- pg100.txt +terrestrial; pg100.txt +terrible pg31100.txt, pg3200.txt, pg100.txt +terrible! pg100.txt +terrible--just pg3200.txt +terrible. pg3200.txt, pg100.txt +terribly pg31100.txt, pg3200.txt +terribly. pg100.txt +terrier pg3200.txt +terrific pg3200.txt +terrific. pg3200.txt +terrified pg3200.txt +terrifies, pg3200.txt +territorial pg3200.txt +territorially-immense pg3200.txt +territories pg3200.txt, pg100.txt +territories, pg3200.txt, pg100.txt +territories. pg100.txt +territories; pg100.txt +territory pg3200.txt +territory, pg3200.txt +territory--an pg3200.txt +territory. pg3200.txt, pg100.txt +territory? pg3200.txt +terror pg31100.txt, pg3200.txt, pg100.txt +terror, pg31100.txt, pg3200.txt, pg100.txt +terror. pg31100.txt, pg3200.txt, pg100.txt +terror.--"good pg31100.txt +terror; pg3200.txt +terror? pg100.txt +terror] pg3200.txt +terrors pg31100.txt, pg3200.txt +terrors, pg3200.txt, pg100.txt +terrors. pg31100.txt, pg3200.txt +terry pg3200.txt +terry----" pg3200.txt +terse pg3200.txt +tertian pg100.txt +test pg31100.txt, pg3200.txt, pg100.txt +test! pg31100.txt +test, pg3200.txt, pg100.txt +test-remark pg3200.txt +test-remark--nobody pg3200.txt +test-remark." pg3200.txt +test. pg3200.txt +testament pg3200.txt, pg100.txt +testament! pg100.txt +testament, pg3200.txt +testament- pg100.txt +testament. pg3200.txt, pg100.txt +testament: pg3200.txt +testament; pg3200.txt +tested pg3200.txt +tested. pg3200.txt +tested." pg3200.txt +testified pg3200.txt +testified, pg3200.txt +testified. pg3200.txt +testifies pg31100.txt +testify pg31100.txt, pg3200.txt, pg100.txt +testify. pg3200.txt, pg100.txt +testify." pg3200.txt +testify; pg3200.txt +testify? pg3200.txt +testifying, pg3200.txt +testifying. pg3200.txt +testily. pg3200.txt +testimonials pg3200.txt +testimonies pg3200.txt +testimonies, pg31100.txt +testimony pg31100.txt, pg3200.txt, pg100.txt +testimony, pg3200.txt +testimony--and pg3200.txt +testimony--imposing pg3200.txt +testimony. 
pg3200.txt, pg100.txt +testimony: pg3200.txt +testimony; pg100.txt +tests pg3200.txt +tests--the pg3200.txt +tetbury." pg31100.txt +tetchy: pg3200.txt +tete, pg3200.txt +tete-a-tete pg31100.txt +tetouan. pg3200.txt +tetuan pg3200.txt +tewksbury pg3200.txt, pg100.txt +tewksbury. pg100.txt +tewksbury; pg100.txt +tewksbury? pg100.txt +texas pg3200.txt +texas" pg3200.txt +texas, pg3200.txt +texas- pg3200.txt +texas. pg3200.txt +text pg31100.txt, pg3200.txt, pg100.txt +text, pg100.txt +text-book! pg3200.txt +text-book--but pg3200.txt +text-book."' pg3200.txt +text-meanings pg3200.txt +text. pg3200.txt, pg100.txt +text: pg3200.txt +text? pg3200.txt, pg100.txt +texts pg31100.txt, pg3200.txt +texture, pg3200.txt +th' pg100.txt +th'monument! pg100.txt +th'search, pg100.txt +thackeray, pg3200.txt +thakombau pg3200.txt +thakombau, pg3200.txt +thames pg31100.txt, pg100.txt +thames, pg3200.txt +than"-- pg31100.txt +than's pg100.txt +than, pg31100.txt +than-- pg3200.txt +than--" pg31100.txt, pg3200.txt +than--' pg3200.txt +thane, pg100.txt +thane? pg100.txt +thanes, pg100.txt +thank pg31100.txt, pg3200.txt, pg100.txt +thank'd, pg100.txt +thanked pg31100.txt, pg3200.txt, pg100.txt +thanked. pg3200.txt, pg100.txt +thankful pg31100.txt, pg3200.txt, pg100.txt +thankful, pg3200.txt +thankful. pg31100.txt, pg3200.txt, pg100.txt +thankful." pg3200.txt +thankfully pg31100.txt +thankfully, pg100.txt +thankfully. pg100.txt +thankfulness pg31100.txt, pg3200.txt +thankfulness! pg100.txt +thankfulness, pg31100.txt, pg100.txt +thankfulness. pg3200.txt, pg100.txt +thanking pg31100.txt, pg3200.txt, pg100.txt +thanks pg31100.txt, pg3200.txt, pg100.txt +thanks! pg100.txt +thanks!- pg100.txt +thanks, pg3200.txt, pg100.txt +thanks. pg31100.txt, pg3200.txt, pg100.txt +thanks." pg3200.txt +thanks.' pg100.txt +thanks: pg3200.txt, pg100.txt +thanks; pg3200.txt, pg100.txt +thanks? pg100.txt +thanks?" pg3200.txt +thanksgiving pg3200.txt, pg100.txt +thanksgiving. pg3200.txt +thanksgivings pg3200.txt +thanksgivings, pg3200.txt +thapa. pg3200.txt +thar. pg3200.txt +thar." pg3200.txt +that! pg3200.txt, pg100.txt +that!" pg31100.txt, pg3200.txt +that!' pg3200.txt +that!--pass pg3200.txt +that" pg3200.txt +that"-- pg3200.txt +that". pg3200.txt +that'?" pg31100.txt +that'd pg3200.txt +that'll pg3200.txt +that's pg31100.txt, pg3200.txt, pg100.txt +that's--" pg3200.txt +that) pg3200.txt +that). pg3200.txt +that, pg31100.txt, pg3200.txt, pg100.txt +that,--but--" pg3200.txt +that- pg100.txt +that-- pg31100.txt, pg3200.txt +that--" pg31100.txt, pg3200.txt +that--and pg3200.txt +that--as pg31100.txt +that--everything pg3200.txt +that--fire. pg3200.txt +that--i pg31100.txt +that--if pg3200.txt +that--it pg3200.txt +that--it's pg3200.txt +that--none pg3200.txt +that--take pg3200.txt +that--that--" pg3200.txt +that--that--' pg3200.txt +that--the pg3200.txt +that--they pg3200.txt +that--vermin!" pg3200.txt +that--you pg3200.txt +that-air pg3200.txt +that-way-accomplish'd pg100.txt +that. pg31100.txt, pg3200.txt, pg100.txt +that." pg31100.txt, pg3200.txt +that.' pg3200.txt +that.) pg3200.txt +that.- pg100.txt +that.--a pg31100.txt +that..." pg31100.txt +that: pg3200.txt, pg100.txt +that:" pg3200.txt +that; pg31100.txt, pg3200.txt, pg100.txt +that;" pg31100.txt +that? pg3200.txt, pg100.txt +that?" pg31100.txt, pg3200.txt +that?". pg3200.txt +that?' pg3200.txt, pg100.txt +that?----isn't pg3200.txt +that?--absolutely pg3200.txt +that?--on pg3200.txt +thatch'd. pg100.txt +thatch-roofed pg3200.txt +thatcher pg3200.txt +thatcher, pg3200.txt +thatcher. 
pg3200.txt +thaw'd; pg100.txt +thaw. pg100.txt +thawing, pg3200.txt +thayer. pg3200.txt +the's, pg3200.txt +the, pg3200.txt +the- pg100.txt +the-- pg3200.txt +the--" pg3200.txt +the----" pg3200.txt +the--a--a--" pg3200.txt +the--proofs." pg3200.txt +the--the pg3200.txt +the--the--" pg3200.txt +the--with pg3200.txt +the. pg3200.txt +theater pg3200.txt +theater, pg3200.txt +theater-actor pg3200.txt +theater." pg3200.txt +theater? pg3200.txt +theaters, pg3200.txt +theatre pg31100.txt, pg3200.txt, pg100.txt +theatre, pg31100.txt, pg3200.txt +theatre--to pg3200.txt +theatre-goers pg3200.txt +theatre-goers. pg3200.txt +theatre. pg100.txt +theatre; pg3200.txt +theatre? pg31100.txt +theatre?" pg31100.txt +theatres, pg3200.txt +theatres. pg31100.txt +theatrical pg3200.txt +theatrically pg3200.txt +theatricals pg31100.txt +theban. pg100.txt +thebes! pg3200.txt +thedore pg3200.txt +thee pg3200.txt, pg100.txt +thee! pg3200.txt, pg100.txt +thee!" pg3200.txt +thee, pg3200.txt, pg100.txt +thee- pg100.txt +thee--" pg3200.txt +thee. pg3200.txt, pg100.txt +thee." pg3200.txt, pg100.txt +thee.' pg100.txt +thee: pg3200.txt, pg100.txt +thee; pg3200.txt, pg100.txt +thee? pg3200.txt, pg100.txt +thee?" pg3200.txt +theft pg3200.txt, pg100.txt +theft!" pg3200.txt +theft, pg3200.txt +theft--" pg3200.txt +theft. pg3200.txt, pg100.txt +thefts pg100.txt +theil pg3200.txt +their's!--how pg31100.txt +their's, pg31100.txt +their's. pg31100.txt +theirs pg3200.txt, pg100.txt +theirs, pg31100.txt, pg3200.txt +theirs--that pg3200.txt +theirs. pg31100.txt, pg3200.txt, pg100.txt +theirs." pg31100.txt, pg3200.txt +theirs; pg31100.txt, pg3200.txt +theirselves pg3200.txt +them! pg3200.txt, pg100.txt +them!" pg31100.txt, pg3200.txt +them" pg3200.txt +them's pg3200.txt +them) pg31100.txt, pg3200.txt +them, pg31100.txt, pg3200.txt, pg100.txt +them," pg31100.txt, pg3200.txt +them,-- pg31100.txt +them,--that pg3200.txt +them- pg100.txt +them-- pg31100.txt, pg3200.txt +them--" pg3200.txt +them--. pg31100.txt +them--and pg31100.txt, pg3200.txt +them--as pg3200.txt +them--avoid pg3200.txt +them--but pg3200.txt +them--by pg31100.txt +them--critically? pg3200.txt +them--did, pg3200.txt +them--especially pg3200.txt +them--fact pg3200.txt +them--for pg31100.txt +them--he pg3200.txt +them--hey?" pg3200.txt +them--if pg3200.txt +them--in pg3200.txt +them--it pg3200.txt +them--like pg3200.txt +them--lots pg3200.txt +them--of pg3200.txt +them--they pg3200.txt +them--who pg3200.txt +them--with pg3200.txt +them--yes. pg3200.txt +them--you pg3200.txt +them-a pg3200.txt +them. pg31100.txt, pg3200.txt, pg100.txt +them." pg31100.txt, pg3200.txt +them.' pg3200.txt, pg100.txt +them.'" pg31100.txt +them.) pg3200.txt +them.--morning pg31100.txt +them.] pg100.txt +them: pg31100.txt, pg3200.txt, pg100.txt +them; pg31100.txt, pg3200.txt, pg100.txt +them? pg31100.txt, pg3200.txt, pg100.txt +them?" pg31100.txt, pg3200.txt +them?' pg3200.txt +them?--only pg31100.txt +them] pg100.txt +theme pg3200.txt, pg100.txt +theme, pg100.txt +theme. pg100.txt +theme: pg3200.txt +theme; pg100.txt +theme? pg100.txt +themes pg3200.txt, pg100.txt +themselves pg31100.txt, pg3200.txt, pg100.txt +themselves! pg3200.txt, pg100.txt +themselves, pg31100.txt, pg3200.txt, pg100.txt +themselves," pg31100.txt +themselves--it's pg3200.txt +themselves. pg31100.txt, pg3200.txt, pg100.txt +themselves." pg31100.txt, pg3200.txt +themselves.] pg3200.txt +themselves; pg31100.txt, pg3200.txt, pg100.txt +themselves? pg3200.txt, pg100.txt +themselves?" pg3200.txt +themselves?' 
pg3200.txt +themselves] pg100.txt +then! pg3200.txt, pg100.txt +then!" pg3200.txt +then"-- pg31100.txt +then, pg31100.txt, pg3200.txt, pg100.txt +then," pg3200.txt +then,-- pg3200.txt +then- pg100.txt +then-- pg3200.txt +then--" pg3200.txt +then---- pg3200.txt +then----" pg3200.txt +then--ah, pg3200.txt +then--and pg3200.txt +then--asked pg3200.txt +then--but pg3200.txt +then--f-z-t-! pg3200.txt +then--finished pg3200.txt +then--i pg3200.txt +then--in pg3200.txt +then--just pg3200.txt +then--more pg3200.txt +then--the pg3200.txt +then--till pg3200.txt +then--to pg3200.txt +then--what pg3200.txt +then--which pg3200.txt +then--why, pg3200.txt +then. pg31100.txt, pg3200.txt, pg100.txt +then." pg31100.txt, pg3200.txt +then.] pg3200.txt +then: pg3200.txt, pg100.txt +then; pg31100.txt, pg3200.txt, pg100.txt +then? pg3200.txt, pg100.txt +then?" pg31100.txt, pg3200.txt +then?' pg3200.txt +then?-- pg3200.txt +then?--" pg31100.txt +then?--come!" pg3200.txt +thence pg31100.txt, pg3200.txt, pg100.txt +thence! pg100.txt +thence, pg3200.txt, pg100.txt +thence. pg100.txt +thence; pg100.txt +thence? pg100.txt +thenceforth pg3200.txt +thenceforth, pg3200.txt +thenceforth. pg3200.txt +thenceforward pg3200.txt +theodor?" pg3200.txt +theodore pg3200.txt +theodore, pg31100.txt +theodore. pg3200.txt +theodorus. pg3200.txt +theologians pg3200.txt +theological pg3200.txt +theology pg3200.txt +theology, pg3200.txt +theoretical; pg3200.txt +theoretically pg3200.txt +theoretically, pg3200.txt +theoric, pg100.txt +theoric; pg100.txt +theories pg3200.txt +theories, pg3200.txt +theories. pg3200.txt +theorising, pg3200.txt +theorizing, pg3200.txt +theory pg3200.txt +theory, pg3200.txt +theory. pg3200.txt +theory: pg3200.txt +theory; pg3200.txt +theosophists, pg3200.txt +ther's pg3200.txt +there! pg3200.txt, pg100.txt +there!" pg3200.txt +there!"--and pg3200.txt +there!' pg3200.txt +there!- pg100.txt +there"--i pg3200.txt +there'd pg3200.txt +there'll pg3200.txt +there's pg3200.txt, pg100.txt +there, pg31100.txt, pg3200.txt, pg100.txt +there- pg100.txt +there--" pg31100.txt, pg3200.txt +there--'royal pg3200.txt +there--_now_ pg3200.txt +there--and pg3200.txt +there--farcillo? pg3200.txt +there--first." pg3200.txt +there--for pg3200.txt +there--for-- pg3200.txt +there--hasn't pg3200.txt +there--i pg3200.txt +there--if pg3200.txt +there--it pg3200.txt +there--now pg3200.txt +there--on pg3200.txt +there--one pg3200.txt +there--shoals pg3200.txt +there--st. pg3200.txt +there--that pg3200.txt +there--there! pg3200.txt +there-apple-johns? pg100.txt +there-caesar!'] pg100.txt +there. pg31100.txt, pg3200.txt, pg100.txt +there." pg31100.txt, pg3200.txt +there.' pg3200.txt, pg100.txt +there.--when pg3200.txt +there.? pg100.txt +there: pg3200.txt, pg100.txt +there; pg31100.txt, pg3200.txt, pg100.txt +there;" pg3200.txt +there? pg3200.txt, pg100.txt +there?" pg31100.txt, pg3200.txt +there?' pg100.txt +thereabout pg100.txt +thereabouts pg3200.txt +thereabouts. pg3200.txt +thereabouts." pg31100.txt +thereabouts; pg31100.txt, pg100.txt +thereabouts? pg100.txt +thereafter pg3200.txt +thereafter. pg3200.txt +thereat! pg100.txt +thereby pg3200.txt, pg100.txt +thereby! pg100.txt +thereby, pg100.txt +thereby. pg3200.txt, pg100.txt +thereby." pg3200.txt +therefore pg31100.txt, pg3200.txt, pg100.txt +therefore, pg31100.txt, pg3200.txt, pg100.txt +therefore- pg100.txt +therefore-- pg3200.txt +therefore. pg100.txt +therefore." pg31100.txt +therefore; pg100.txt +therefore? 
pg100.txt +therefrom pg3200.txt +therefrom, pg3200.txt +therefrom: pg3200.txt +therein pg3200.txt, pg100.txt +therein, pg100.txt +therein. pg100.txt +thereof! pg100.txt +thereof, pg3200.txt, pg100.txt +thereof. pg3200.txt, pg100.txt +thereof." pg3200.txt +thereof; pg100.txt +thereon pg100.txt +thereon; pg3200.txt +thereto pg3200.txt, pg100.txt +thereto, pg3200.txt +thereto. pg3200.txt +thereto." pg3200.txt +thereto; pg100.txt +theretofore, pg3200.txt +thereunto, pg100.txt +thereupon pg3200.txt +thereupon, pg31100.txt +therewith pg3200.txt +therewith." pg3200.txt +therewithal pg3200.txt, pg100.txt +therewithal, pg100.txt +therfore pg31100.txt +thermometer pg3200.txt +thermometer, pg3200.txt +thermometer. pg3200.txt +thermometers pg3200.txt +thersites pg100.txt +thersites! pg100.txt +thersites, pg100.txt +thersites. pg100.txt +thersites? pg100.txt +thesaurus pg3200.txt +these! pg3200.txt, pg100.txt +these, pg3200.txt, pg100.txt +these- pg100.txt +these--dames pg3200.txt +these--these--what pg3200.txt +these. pg31100.txt, pg3200.txt, pg100.txt +these." pg31100.txt, pg3200.txt +these: pg3200.txt +these:-- pg31100.txt +these; pg3200.txt, pg100.txt +these? pg3200.txt, pg100.txt +these?" pg3200.txt +these_ pg3200.txt +theseus pg100.txt +theseus. pg100.txt +theseus? pg100.txt +theseus] pg100.txt +thessaly pg100.txt +thessaly. pg100.txt +thetis! pg100.txt +theuer. pg3200.txt +they! pg3200.txt +they'd pg3200.txt +they'll pg3200.txt, pg100.txt +they're pg3200.txt +they've pg3200.txt +they, pg31100.txt, pg3200.txt, pg100.txt +they," pg31100.txt +they- pg100.txt +they-- pg3200.txt +they--" pg3200.txt +they--a--" pg3200.txt +they--according pg3200.txt +they--they--" pg3200.txt +they. pg3200.txt, pg100.txt +they." pg3200.txt +they.' pg3200.txt +they? pg3200.txt, pg100.txt +they?" pg31100.txt, pg3200.txt +they?' pg3200.txt +they?-- pg31100.txt +thibetan pg3200.txt +thick pg31100.txt, pg3200.txt, pg100.txt +thick! pg31100.txt, pg3200.txt +thick, pg3200.txt, pg100.txt +thick- pg100.txt +thick--and pg3200.txt +thick-fingered pg3200.txt +thick-headed pg3200.txt +thick-skin? pg100.txt +thick. pg31100.txt, pg3200.txt, pg100.txt +thick? pg100.txt +thicken, pg3200.txt +thickened pg3200.txt +thickened, pg3200.txt +thickens pg100.txt +thickens. pg3200.txt +thicker pg3200.txt +thickest pg3200.txt +thickest, pg3200.txt +thickest. pg31100.txt +thicket- pg100.txt +thickly pg3200.txt +thickness pg31100.txt, pg3200.txt +thief pg3200.txt, pg100.txt +thief! pg100.txt +thief!" pg3200.txt +thief!"--planted pg3200.txt +thief, pg100.txt +thief-stol'n, pg100.txt +thief. pg3200.txt, pg100.txt +thief." pg3200.txt +thief."] pg3200.txt +thief; pg100.txt +thief?" pg3200.txt +thievery. pg100.txt +thievery: pg100.txt +thieves pg3200.txt, pg100.txt +thieves! pg100.txt +thieves!" pg3200.txt +thieves, pg3200.txt, pg100.txt +thieves. pg3200.txt, pg100.txt +thieves." pg3200.txt +thieves; pg3200.txt, pg100.txt +thieves? pg100.txt +thieving pg3200.txt +thigh, pg100.txt +thigh. pg100.txt +thigh: pg3200.txt +thighs, pg3200.txt, pg100.txt +thim pg3200.txt +thimble, pg100.txt +thimble. pg3200.txt, pg100.txt +thimbleful pg3200.txt +thin!) pg3200.txt +thin, pg31100.txt, pg3200.txt +thin. pg3200.txt, pg100.txt +thin." pg3200.txt +thin; pg3200.txt +thine pg3200.txt, pg100.txt +thine! pg3200.txt, pg100.txt +thine, pg100.txt +thine. pg100.txt +thine." pg3200.txt +thine.' pg100.txt +thine: pg100.txt +thine; pg100.txt +thine? pg100.txt +thing pg31100.txt, pg3200.txt, pg100.txt +thing! pg31100.txt, pg3200.txt, pg100.txt +thing!" 
pg31100.txt, pg3200.txt +thing!' pg3200.txt +thing" pg3200.txt +thing, pg31100.txt, pg3200.txt, pg100.txt +thing- pg100.txt +thing-- pg3200.txt +thing--" pg3200.txt +thing--and pg31100.txt, pg3200.txt +thing--as pg31100.txt +thing--baseness pg3200.txt +thing--began pg3200.txt +thing--calculated pg3200.txt +thing--especially pg3200.txt +thing--even pg31100.txt +thing--general pg3200.txt +thing--has pg3200.txt +thing--i pg3200.txt +thing--it pg3200.txt +thing--scars pg3200.txt +thing--suggestion. pg3200.txt +thing--that pg3200.txt +thing--the pg3200.txt +thing--there pg3200.txt +thing--when pg3200.txt +thing--where?" pg3200.txt +thing--yes, pg3200.txt +thing. pg31100.txt, pg3200.txt, pg100.txt +thing." pg31100.txt, pg3200.txt +thing.' pg3200.txt +thing...." pg31100.txt +thing: pg3200.txt, pg100.txt +thing; pg31100.txt, pg3200.txt, pg100.txt +thing? pg31100.txt, pg3200.txt, pg100.txt +thing?" pg31100.txt, pg3200.txt +thing?' pg3200.txt +things pg31100.txt, pg3200.txt, pg100.txt +things! pg31100.txt, pg3200.txt, pg100.txt +things!" pg31100.txt, pg3200.txt +things" pg3200.txt +things, pg31100.txt, pg3200.txt, pg100.txt +things," pg31100.txt, pg3200.txt +things- pg100.txt +things--desolation pg3200.txt +things--didn't pg3200.txt +things--even pg3200.txt +things--i pg3200.txt +things--man. pg3200.txt +things--no, pg3200.txt +things. pg31100.txt, pg3200.txt, pg100.txt +things." pg31100.txt, pg3200.txt +things.' pg3200.txt +things.--and pg3200.txt +things: pg3200.txt, pg100.txt +things; pg31100.txt, pg3200.txt, pg100.txt +things? pg31100.txt, pg3200.txt, pg100.txt +things?" pg31100.txt, pg3200.txt +things?' pg3200.txt +think! pg3200.txt, pg100.txt +think!) pg100.txt +think"-- pg31100.txt +think'st pg100.txt +think't. pg100.txt +think) pg3200.txt +think). pg3200.txt +think, pg31100.txt, pg3200.txt, pg100.txt +think," pg31100.txt +think,) pg3200.txt +think- pg100.txt +think-- pg3200.txt +think--" pg31100.txt, pg3200.txt +think--but pg3200.txt +think--had pg31100.txt +think--in pg3200.txt +think--indeed pg31100.txt +think--nothing. pg3200.txt +think--so pg3200.txt +think--social pg3200.txt +think--stops pg3200.txt +think. pg31100.txt, pg3200.txt, pg100.txt +think." pg31100.txt, pg3200.txt +think.' pg3200.txt +think: pg3200.txt +think; pg3200.txt +think? pg3200.txt, pg100.txt +think?" pg31100.txt, pg3200.txt +think?' pg3200.txt +thinker pg3200.txt +thinker-- pg3200.txt +thinker. pg3200.txt +thinkers pg3200.txt +thinkest pg100.txt +thinking pg31100.txt, pg3200.txt, pg100.txt +thinking! pg3200.txt +thinking, pg31100.txt, pg3200.txt, pg100.txt +thinking-- pg3200.txt +thinking--that pg3200.txt +thinking--thinking pg3200.txt +thinking-process pg3200.txt +thinking. pg31100.txt, pg3200.txt, pg100.txt +thinking; pg3200.txt +thinking? pg31100.txt +thinkings, pg100.txt +thinkings. pg100.txt +thinks pg31100.txt, pg3200.txt, pg100.txt +thinks, pg100.txt +thinks--oh! pg31100.txt +thinks. pg3200.txt +thinks? pg100.txt +thinly pg3200.txt +thinly. pg100.txt +thinned pg3200.txt +thinner, pg31100.txt, pg3200.txt +thinness pg31100.txt +thinning pg3200.txt +third pg31100.txt, pg3200.txt, pg100.txt +third, pg31100.txt, pg3200.txt, pg100.txt +third- pg100.txt +third--peter. pg3200.txt +third--theodor pg3200.txt +third--this pg3200.txt +third-borough. pg100.txt +third-quality pg3200.txt +third. pg3200.txt, pg100.txt +third." pg31100.txt +third: pg3200.txt +third; pg3200.txt, pg100.txt +thirdly, pg3200.txt +thirdly. 
pg3200.txt +thirds pg31100.txt +thirst pg3200.txt, pg100.txt +thirst, pg3200.txt, pg100.txt +thirst--and pg3200.txt +thirst. pg100.txt +thirst; pg3200.txt +thirsted pg3200.txt +thirstiness pg3200.txt +thirsting pg3200.txt +thirsty pg3200.txt, pg100.txt +thirsty. pg3200.txt +thirsty." pg3200.txt +thirteen pg3200.txt +thirteen, pg3200.txt +thirteen-fifteenths pg3200.txt +thirteen. pg3200.txt +thirteen." pg31100.txt +thirteenth pg3200.txt +thirtieth pg100.txt +thirty pg31100.txt, pg3200.txt, pg100.txt +thirty, pg31100.txt, pg3200.txt +thirty-five pg31100.txt, pg3200.txt +thirty-five. pg3200.txt +thirty-four pg3200.txt +thirty-four. pg3200.txt +thirty-nine pg3200.txt +thirty-one pg3200.txt, pg100.txt +thirty-seven pg3200.txt +thirty-six pg3200.txt +thirty-three pg3200.txt +thirty-three-hour pg3200.txt +thirty. pg3200.txt, pg100.txt +thirty." pg3200.txt +this! pg31100.txt, pg3200.txt, pg100.txt +this!" pg3200.txt +this"-- pg3200.txt +this's pg3200.txt +this, pg31100.txt, pg3200.txt, pg100.txt +this- pg3200.txt, pg100.txt +this-- pg3200.txt +this--almost pg31100.txt +this--and pg31100.txt, pg3200.txt +this--i pg3200.txt +this--it pg3200.txt +this--that pg3200.txt +this--these pg3200.txt +this--they pg3200.txt +this--well, pg3200.txt +this--which pg31100.txt +this--you pg3200.txt +this-on pg100.txt +this-world pg3200.txt +this. pg31100.txt, pg3200.txt, pg100.txt +this." pg31100.txt, pg3200.txt +this.' pg3200.txt +this: pg31100.txt, pg3200.txt, pg100.txt +this:-- pg3200.txt +this; pg31100.txt, pg3200.txt, pg100.txt +this? pg31100.txt, pg3200.txt, pg100.txt +this?" pg31100.txt, pg3200.txt +this?' pg3200.txt +this?--miss pg31100.txt +thisby pg100.txt +thisby! pg100.txt +thisby's pg100.txt +thisby, pg100.txt +thisby. pg100.txt +thisby.' pg100.txt +thisne!' pg100.txt +thistle. pg100.txt +thither pg31100.txt, pg3200.txt, pg100.txt +thither! pg100.txt +thither, pg31100.txt, pg3200.txt, pg100.txt +thither- pg100.txt +thither. pg3200.txt, pg100.txt +thither." pg3200.txt +thither: pg3200.txt, pg100.txt +thither; pg100.txt +thither? pg100.txt +thlanover pg3200.txt +tho' pg31100.txt +thoght pg3200.txt +thomas pg31100.txt, pg3200.txt, pg100.txt +thomas! pg100.txt +thomas's pg31100.txt +thomas, pg31100.txt, pg3200.txt, pg100.txt +thomas. pg31100.txt, pg3200.txt, pg100.txt +thomas." pg31100.txt +thomas; pg31100.txt +thomas? pg100.txt +thomas?" pg31100.txt +thompson pg3200.txt +thompson's pg3200.txt +thompson, pg3200.txt +thompson--" pg3200.txt +thompson. pg3200.txt +thompson? pg3200.txt +thomson pg31100.txt +thomson: pg3200.txt +thorn pg3200.txt, pg100.txt +thorn, pg100.txt +thorn-bush; pg100.txt +thorn. pg3200.txt, pg100.txt +thorn; pg100.txt +thornburg pg3200.txt +thorndike pg3200.txt +thorndike. pg3200.txt +thorndike." pg3200.txt +thorns pg31100.txt, pg3200.txt, pg100.txt +thorns, pg100.txt +thorns. pg3200.txt +thorns; pg3200.txt +thornton pg31100.txt +thorough pg31100.txt +thoroughbrace pg3200.txt +thoroughfare, pg3200.txt +thoroughly pg31100.txt, pg3200.txt, pg100.txt +thoroughly, pg3200.txt +thoroughly. pg3200.txt +thoroughly; pg31100.txt +thoroughness pg3200.txt +thoroughness, pg3200.txt +thorpe pg31100.txt +thorpe's pg31100.txt +thorpe, pg31100.txt +thorpe. pg31100.txt +thorpe; pg31100.txt +thorpe?" pg31100.txt +those pg31100.txt, pg3200.txt, pg100.txt +those, pg31100.txt, pg100.txt +those- pg100.txt +those. pg3200.txt, pg100.txt +those: pg3200.txt +those?" pg31100.txt +thou! 
pg100.txt +thou'lt pg3200.txt +thou'rt pg3200.txt, pg100.txt +thou'st pg3200.txt, pg100.txt +thou, pg3200.txt, pg100.txt +thou- pg100.txt +thou--whomsoever pg31100.txt +thou-hast-wounded-the-spirit-that-loved-thee pg3200.txt +thou. pg100.txt +thou." pg3200.txt +thou.' pg100.txt +thou: pg100.txt +thou; pg100.txt +thou? pg100.txt +thou?" pg3200.txt +thou?- pg100.txt +though pg31100.txt, pg3200.txt, pg100.txt +though! pg100.txt +though!" pg3200.txt +though, pg31100.txt, pg3200.txt, pg100.txt +though," pg3200.txt +though,) pg3200.txt +though--and pg3200.txt +though--harris pg3200.txt +though--nothing's pg3200.txt +though--that's pg3200.txt +though. pg3200.txt +though; pg3200.txt +though? pg3200.txt, pg100.txt +though?" pg3200.txt +though?' pg3200.txt +thought pg31100.txt, pg3200.txt, pg100.txt +thought! pg3200.txt, pg100.txt +thought). pg3200.txt +thought): pg3200.txt +thought, pg31100.txt, pg3200.txt, pg100.txt +thought- pg3200.txt, pg100.txt +thought--" pg3200.txt +thought--' pg3200.txt +thought--and pg3200.txt +thought--for pg3200.txt +thought--that pg3200.txt +thought--that's pg3200.txt +thought-origin. pg3200.txt +thought-transference. pg3200.txt +thought. pg31100.txt, pg3200.txt, pg100.txt +thought." pg31100.txt +thought.' pg3200.txt +thought: pg3200.txt, pg100.txt +thought; pg31100.txt, pg3200.txt, pg100.txt +thought? pg3200.txt, pg100.txt +thought?" pg3200.txt +thoughtful pg31100.txt, pg3200.txt +thoughtful, pg3200.txt +thoughtful. pg31100.txt, pg3200.txt +thoughtfully pg3200.txt +thoughtfully, pg3200.txt +thoughtfulness pg31100.txt, pg3200.txt +thoughtless pg31100.txt, pg3200.txt +thoughtless! pg3200.txt +thoughtlessly pg3200.txt +thoughtlessness pg31100.txt, pg3200.txt +thoughtlessness, pg31100.txt +thoughts pg31100.txt, pg3200.txt, pg100.txt +thoughts! pg100.txt +thoughts, pg31100.txt, pg3200.txt, pg100.txt +thoughts- pg100.txt +thoughts. pg31100.txt, pg3200.txt, pg100.txt +thoughts." pg31100.txt +thoughts: pg100.txt +thoughts; pg31100.txt, pg100.txt +thous'n's pg3200.txt +thousand pg31100.txt, pg3200.txt, pg100.txt +thousand! pg3200.txt, pg100.txt +thousand!" pg3200.txt +thousand) pg3200.txt +thousand, pg3200.txt, pg100.txt +thousand- pg100.txt +thousand--' pg3200.txt +thousand-dollar pg3200.txt +thousand-fold pg100.txt +thousand. pg3200.txt, pg100.txt +thousand." pg3200.txt +thousand; pg3200.txt +thousand? pg100.txt +thousand?" pg3200.txt +thousands pg31100.txt, pg3200.txt, pg100.txt +thousands, pg31100.txt, pg3200.txt +thousands. pg31100.txt, pg3200.txt, pg100.txt +thousands.--'happier pg31100.txt +thousands; pg100.txt +thrall pg3200.txt +thrall'd pg100.txt +thrall, pg100.txt +thrall. pg100.txt +thrash pg3200.txt +thrashed pg3200.txt +thrashed, pg3200.txt +thrashing pg31100.txt, pg3200.txt +thrashing, pg3200.txt +thread pg3200.txt, pg100.txt +thread! pg100.txt +thread, pg31100.txt, pg3200.txt, pg100.txt +thread-spinners. pg3200.txt +thread. pg3200.txt, pg100.txt +threaded pg3200.txt +threading pg3200.txt +threads pg3200.txt +threat pg3200.txt, pg100.txt +threat'ned pg100.txt +threat, pg100.txt +threaten pg31100.txt, pg3200.txt, pg100.txt +threaten'd pg100.txt +threaten, pg3200.txt +threaten; pg100.txt +threatened pg3200.txt +threatened, pg31100.txt, pg3200.txt +threatened. pg31100.txt +threatening pg3200.txt +threateningly pg3200.txt +threatenings pg3200.txt +threatens pg3200.txt, pg100.txt +threats pg3200.txt, pg100.txt +threats! pg100.txt +threats), pg3200.txt +threats, pg31100.txt, pg3200.txt, pg100.txt +threats. 
pg3200.txt, pg100.txt +threats; pg3200.txt +three pg31100.txt, pg3200.txt, pg100.txt +three!" pg3200.txt +three'? pg100.txt +three, pg31100.txt, pg3200.txt, pg100.txt +three," pg31100.txt +three- pg3200.txt +three--" pg3200.txt +three--advance!" pg3200.txt +three--advance!' pg3200.txt +three--the pg3200.txt +three-cent pg3200.txt +three-deckers pg31100.txt +three-eights pg3200.txt +three-fifths pg3200.txt +three-fifty pg3200.txt +three-fourths pg3200.txt +three-fourths, pg3200.txt +three-globed pg3200.txt +three-pile; pg100.txt +three-ply--car---pet--containing pg3200.txt +three-quarters pg3200.txt +three-quarters. pg3200.txt +three-story pg3200.txt +three-times-three pg3200.txt +three-weeks pg3200.txt +three. pg31100.txt, pg3200.txt, pg100.txt +three." pg3200.txt +three: pg100.txt +three; pg31100.txt, pg3200.txt, pg100.txt +three? pg100.txt +three?" pg3200.txt +threepence pg100.txt +threes pg3200.txt +threescore pg3200.txt +threescore; pg100.txt +threshed pg3200.txt +threshold pg3200.txt, pg100.txt +threw pg31100.txt, pg3200.txt, pg100.txt +threw, pg3200.txt, pg100.txt +threw- pg100.txt +thrice pg3200.txt +thrice, pg100.txt +thrice-worthy pg100.txt +thrice? pg100.txt +thrift pg100.txt +thrift, pg100.txt +thrift. pg100.txt +thrifts, pg100.txt +thrifty pg3200.txt +thrill pg3200.txt +thrill!--it pg3200.txt +thrill, pg3200.txt +thrill-- pg3200.txt +thrill. pg3200.txt +thrilled pg3200.txt +thrilling pg3200.txt +thrilling. pg3200.txt +thrillingest pg3200.txt +thrive pg3200.txt, pg100.txt +thrive! pg100.txt +thrive, pg31100.txt, pg100.txt +thrive. pg3200.txt, pg100.txt +thrive; pg3200.txt +thrived pg3200.txt, pg100.txt +thriven pg3200.txt +throat pg31100.txt, pg3200.txt, pg100.txt +throat, pg3200.txt, pg100.txt +throat- pg100.txt +throat--hair pg3200.txt +throat. pg31100.txt, pg3200.txt, pg100.txt +throat." pg31100.txt +throat; pg31100.txt, pg3200.txt, pg100.txt +throat?" pg3200.txt +throats pg100.txt +throats, pg100.txt +throats. pg100.txt +throats; pg100.txt +throats? pg100.txt +throats] pg100.txt +throes pg3200.txt, pg100.txt +throes, pg100.txt +throgmorton pg3200.txt +thron'd. pg100.txt +throne pg3200.txt, pg100.txt +throne! pg100.txt +throne!" pg3200.txt +throne'- pg100.txt +throne's pg3200.txt +throne, pg3200.txt, pg100.txt +throne. pg31100.txt, pg3200.txt, pg100.txt +throne.---- pg31100.txt +throne; pg100.txt +throne? pg100.txt +throne] pg100.txt +throned pg3200.txt +throned, pg3200.txt +thrones pg3200.txt +thrones!" pg3200.txt +throng pg3200.txt, pg100.txt +throng!- pg100.txt +throng. pg100.txt +throng.) pg3200.txt +thronged pg3200.txt +thronging pg3200.txt +throngs pg3200.txt +throngs, pg100.txt +throngs; pg100.txt +throo pg3200.txt +throttle pg3200.txt +throttled pg3200.txt +throuble. pg3200.txt +through pg31100.txt, pg3200.txt, pg100.txt +through!" pg31100.txt, pg3200.txt +through't, pg100.txt +through, pg31100.txt, pg3200.txt, pg100.txt +through--filled pg3200.txt +through--only pg3200.txt +through. pg31100.txt, pg3200.txt, pg100.txt +through." pg31100.txt, pg3200.txt +through.' pg3200.txt +through.] pg3200.txt +through: pg3200.txt, pg100.txt +through; pg100.txt +through? pg3200.txt +through?" pg3200.txt +through?' pg3200.txt +throughfares pg100.txt +throughly. pg100.txt +throughly; pg100.txt +throughout pg31100.txt, pg3200.txt, pg100.txt +throughout, pg31100.txt +throve pg3200.txt +throw pg31100.txt, pg3200.txt, pg100.txt +throw, pg100.txt +throw. 
pg31100.txt, pg100.txt +throw: pg3200.txt +throw; pg100.txt +throwed pg3200.txt +thrower-out pg100.txt +throwest; pg100.txt +throwing pg31100.txt, pg3200.txt, pg100.txt +thrown pg31100.txt, pg3200.txt, pg100.txt +thrown, pg3200.txt +thrown. pg100.txt +thrown; pg3200.txt, pg100.txt +throws pg3200.txt +thrum; pg100.txt +thrush pg31100.txt +thrush's pg31100.txt +thrush, pg31100.txt +thrust pg3200.txt, pg100.txt +thrust, pg100.txt +thrust. pg3200.txt, pg100.txt +thrusting pg3200.txt, pg100.txt +thrusts pg100.txt +thud. pg3200.txt +thug pg3200.txt +thug's pg3200.txt +thug's!" pg3200.txt +thug-tale pg3200.txt +thug. pg3200.txt +thuggee pg3200.txt +thuggee. pg3200.txt +thuggery. pg3200.txt +thugs pg3200.txt +thugs, pg3200.txt +thugs. pg3200.txt +thugs; pg3200.txt +thumb pg3200.txt, pg100.txt +thumb, pg3200.txt, pg100.txt +thumb-and-finger- pg3200.txt +thumb-tacks. pg3200.txt +thumb; pg3200.txt +thumbmark pg3200.txt +thumbs pg3200.txt +thumbs, pg100.txt +thump pg3200.txt, pg100.txt +thump'd pg100.txt +thump'd, pg100.txt +thump, pg3200.txt +thump. pg100.txt +thumping, pg3200.txt +thun. pg3200.txt +thunder pg3200.txt, pg100.txt +thunder! pg3200.txt +thunder, pg3200.txt, pg100.txt +thunder--" pg3200.txt +thunder-and- pg3200.txt +thunder-and-lightning pg3200.txt +thunder-bird pg3200.txt +thunder-bird, pg3200.txt +thunder-clap, pg3200.txt +thunder-clap. pg3200.txt +thunder-claps pg3200.txt +thunder-clouds; pg3200.txt +thunder-crashes pg3200.txt +thunder-stone; pg100.txt +thunder. pg3200.txt, pg100.txt +thunder: pg3200.txt +thunder? pg100.txt +thunder?" pg3200.txt +thunderbolt pg3200.txt, pg100.txt +thunderbolt. pg100.txt +thunderbolts, pg100.txt +thunderclap pg3200.txt +thundercrash pg3200.txt +thundered pg3200.txt +thundered. pg3200.txt +thundergust pg3200.txt +thundering pg3200.txt +thunderous pg3200.txt +thunders pg3200.txt +thunders! pg100.txt +thunders, pg3200.txt +thunders. pg3200.txt +thunderstone; pg100.txt +thunderstorms pg3200.txt +thunderstroke. pg100.txt +thunderstruck pg3200.txt +thurio pg100.txt +thurio! pg100.txt +thurio, pg100.txt +thurio. pg100.txt +thurio; pg100.txt +thurio? pg100.txt +thursday pg31100.txt, pg3200.txt, pg100.txt +thursday!" pg31100.txt +thursday, pg31100.txt +thursday. pg3200.txt +thursday." pg31100.txt, pg3200.txt +thursday? pg100.txt +thus pg31100.txt, pg3200.txt, pg100.txt +thus! pg100.txt +thus!' pg100.txt +thus, pg3200.txt, pg100.txt +thus- pg100.txt +thus-- pg31100.txt, pg3200.txt +thus----" pg3200.txt +thus--i pg3200.txt +thus. pg31100.txt, pg100.txt +thus: pg31100.txt, pg3200.txt, pg100.txt +thus:-- pg31100.txt +thus; pg100.txt +thus? pg3200.txt, pg100.txt +thwack pg3200.txt +thwarted pg31100.txt, pg3200.txt +thwarted." pg31100.txt +thwarting! pg100.txt +thyng. pg3200.txt +thyreus pg100.txt +thyreus. pg100.txt +thyreus; pg100.txt +thyself pg3200.txt, pg100.txt +thyself! pg100.txt +thyself!" pg3200.txt +thyself, pg3200.txt, pg100.txt +thyself- pg100.txt +thyself. pg3200.txt, pg100.txt +thyself." pg3200.txt +thyself; pg100.txt +thyself? pg100.txt +tiber pg100.txt +tiber! pg3200.txt +tiber. pg100.txt +tiberias pg3200.txt +tiberias, pg3200.txt +tiberias. pg3200.txt +tiberio. pg100.txt +tichborn pg3200.txt +tick pg3200.txt +tick, pg3200.txt +tick." pg3200.txt +tick?" pg3200.txt +ticket pg3200.txt +ticket), pg3200.txt +ticket, pg3200.txt +ticket--even pg3200.txt +ticket-holders pg3200.txt +ticket-office pg3200.txt +ticket. pg3200.txt +ticket.' pg3200.txt +ticket?" 
pg3200.txt +ticketed pg3200.txt +tickets pg3200.txt +tickets, pg31100.txt, pg3200.txt +tickets--good pg3200.txt +tickets?" pg3200.txt +ticking pg3200.txt +tickle pg3200.txt, pg100.txt +tickled pg100.txt +tickles pg3200.txt +tickling. pg100.txt +tidal pg3200.txt +tide pg3200.txt, pg100.txt +tide, pg100.txt +tide- pg100.txt +tide-water pg3200.txt +tide. pg3200.txt, pg100.txt +tide; pg100.txt +tide? pg100.txt +tides pg100.txt +tides! pg100.txt +tides, pg100.txt +tides. pg100.txt +tidies pg3200.txt +tidiness. pg3200.txt +tidings pg31100.txt, pg3200.txt, pg100.txt +tidings, pg31100.txt, pg100.txt +tidings. pg3200.txt, pg100.txt +tidings? pg100.txt +tidy pg31100.txt, pg3200.txt +tidy, pg3200.txt +tie pg31100.txt, pg3200.txt, pg100.txt +tie; pg3200.txt +tied pg31100.txt, pg3200.txt, pg100.txt +tied, pg3200.txt +tied. pg3200.txt, pg100.txt +tied." pg31100.txt +tied? pg100.txt +tier, pg3200.txt +tierce, pg3200.txt +ties pg3200.txt +ties, pg31100.txt, pg3200.txt +tiff pg3200.txt +tiger pg3200.txt +tiger!" pg3200.txt +tiger's pg3200.txt +tiger--ah-h-h! pg3200.txt +tiger-persuader pg3200.txt +tiger. pg3200.txt +tiger: pg100.txt +tiger; pg100.txt +tigers pg3200.txt +tigers, pg3200.txt +tigers. pg3200.txt +tigers? pg100.txt +tight pg3200.txt +tight, pg3200.txt +tighten pg3200.txt +tighter pg3200.txt +tightly; pg100.txt +tights pg3200.txt +tigress pg3200.txt +til pg3200.txt +tilbury pg3200.txt +tilbury's pg3200.txt +tilbury. pg3200.txt +tilden pg3200.txt +tile.) pg3200.txt +till pg31100.txt, pg3200.txt, pg100.txt +till--" pg3200.txt +till----" pg3200.txt +tiller pg3200.txt +tiller. pg3200.txt +tilly-vally, pg100.txt +tilney pg31100.txt +tilney's pg31100.txt +tilney's, pg31100.txt +tilney, pg31100.txt +tilney. pg31100.txt +tilney." pg31100.txt +tilney; pg31100.txt +tilneys pg31100.txt +tilneys, pg31100.txt +tilneys. pg31100.txt +tilneys; pg31100.txt +tilt pg3200.txt +tilted pg3200.txt +tilter, pg100.txt +tiltin' pg3200.txt +tilting pg3200.txt +tilts pg100.txt +timandra pg100.txt +timandra? pg100.txt +timber pg3200.txt +timber- pg3200.txt +timber. pg31100.txt, pg3200.txt +timber; pg100.txt +timbered pg3200.txt +timbering; pg3200.txt +timbers pg3200.txt +time! pg31100.txt, pg3200.txt, pg100.txt +time!" pg3200.txt +time!' pg3200.txt +time!--hundred pg3200.txt +time!--oh! pg31100.txt +time"-- pg3200.txt +time"--a pg3200.txt +time's pg3200.txt +time), pg3200.txt +time, pg31100.txt, pg3200.txt, pg100.txt +time-- pg3200.txt +time--" pg3200.txt +time--a pg3200.txt +time--and pg31100.txt, pg3200.txt +time--anybody pg3200.txt +time--but pg3200.txt +time--for pg31100.txt +time--i pg3200.txt +time--is pg3200.txt +time--it pg3200.txt +time--not pg3200.txt +time--otherwise pg3200.txt +time--so pg3200.txt +time--so, pg3200.txt +time--such pg3200.txt +time--that's pg3200.txt +time-honored pg3200.txt +time-lock; pg3200.txt +time-pieces pg3200.txt +time-table; pg3200.txt +time-tables pg3200.txt +time-throwing pg3200.txt +time. pg31100.txt, pg3200.txt, pg100.txt +time." pg31100.txt, pg3200.txt, pg100.txt +time.' pg3200.txt +time.- pg100.txt +time.] pg3200.txt +time: pg3200.txt, pg100.txt +time; pg31100.txt, pg3200.txt, pg100.txt +time? pg3200.txt, pg100.txt +time?" pg31100.txt, pg3200.txt +time?' pg3200.txt +timed pg3200.txt +timed. pg31100.txt +timed." pg3200.txt +timely pg3200.txt +times pg31100.txt, pg3200.txt, pg100.txt +times! pg3200.txt, pg100.txt +times!" 
pg3200.txt +times" pg3200.txt +times"--or pg3200.txt +times' pg3200.txt +times, pg31100.txt, pg3200.txt, pg100.txt +times," pg3200.txt +times- pg100.txt +times--" pg3200.txt +times--but pg3200.txt +times--dinner pg3200.txt +times--i pg3200.txt +times-democrat,' pg3200.txt +times. pg31100.txt, pg3200.txt, pg100.txt +times." pg31100.txt, pg3200.txt +times.) pg3200.txt +times: pg3200.txt, pg100.txt +times; pg3200.txt, pg100.txt +times? pg3200.txt, pg100.txt +times?" pg3200.txt +times_ pg3200.txt +timid pg3200.txt +timid, pg31100.txt, pg3200.txt +timid-like: pg3200.txt +timidity pg31100.txt, pg3200.txt +timidity. pg31100.txt +timidly: pg3200.txt +timidly:-- pg3200.txt +timon pg100.txt +timon! pg100.txt +timon!' pg100.txt +timon's pg100.txt +timon's. pg100.txt +timon's? pg100.txt +timon, pg100.txt +timon- pg100.txt +timon. pg100.txt +timon; pg100.txt +timon? pg100.txt +timorously, pg3200.txt +tin pg3200.txt +tinct. pg100.txt +tinder-box: pg100.txt +tinge pg3200.txt +tinged pg3200.txt +tingle pg3200.txt +tingling. pg100.txt +tiniest pg3200.txt +tinker pg3200.txt, pg100.txt +tinker's pg100.txt +tinker. pg100.txt +tinkering.] pg3200.txt +tinkers pg100.txt +tinkle pg3200.txt +tinkling pg3200.txt +tinman, pg3200.txt +tinner pg3200.txt +tinner. pg3200.txt +tinner; pg3200.txt +tinsel pg3200.txt +tinseled pg3200.txt +tint pg3200.txt +tint, pg3200.txt +tint. pg3200.txt +tint.-- pg3200.txt +tint; pg3200.txt +tinted pg3200.txt +tinted, pg3200.txt +tintern pg31100.txt +tintings; pg3200.txt +tintless pg3200.txt +tints pg3200.txt +tints, pg3200.txt +tints. pg3200.txt +tintypes; pg3200.txt +tinware. pg3200.txt +tiny pg3200.txt, pg100.txt +tip pg3200.txt +tip-staves pg100.txt +tip-toeing pg3200.txt +tip-top. pg3200.txt +tippet. pg31100.txt +tips pg3200.txt +tips? pg3200.txt +tipsy pg3200.txt +tiptoe pg3200.txt +tiptoed pg3200.txt +tiptop, pg3200.txt +tir'd. pg100.txt +tire pg3200.txt +tire, pg100.txt +tire. pg100.txt +tire.' pg100.txt +tire; pg31100.txt +tired pg31100.txt, pg3200.txt, pg100.txt +tired! pg3200.txt +tired, pg31100.txt, pg3200.txt, pg100.txt +tired--" pg3200.txt +tired--hence pg3200.txt +tired. pg31100.txt, pg3200.txt +tired." pg31100.txt, pg3200.txt +tired; pg31100.txt, pg3200.txt +tired?" pg3200.txt +tireless, pg3200.txt +tires pg31100.txt, pg3200.txt, pg100.txt +tiresome pg31100.txt, pg3200.txt +tiresome! pg31100.txt +tiresome. pg31100.txt, pg3200.txt +tiresome; pg3200.txt +tiring pg3200.txt +tisick, pg100.txt +tissue pg3200.txt +tissue, pg100.txt +tissue-paper pg3200.txt +tissue. pg3200.txt +titania, pg100.txt +titania. pg100.txt +titanic pg3200.txt +titanic." pg3200.txt +titans pg3200.txt +tithe pg100.txt +tithing, pg100.txt +titian pg3200.txt +titian's pg3200.txt +titian. pg3200.txt +titians pg3200.txt +titinius! pg100.txt +titinius, pg100.txt +titinius," pg100.txt +titinius. pg100.txt +titinius: pg100.txt +title pg31100.txt, pg3200.txt, pg100.txt +title! pg100.txt +title) pg3200.txt +title, pg3200.txt, pg100.txt +title- pg100.txt +title-leaf, pg100.txt +title-page pg3200.txt +title. pg31100.txt, pg3200.txt +title." pg3200.txt +title? pg3200.txt, pg100.txt +titled pg3200.txt +titleless, pg100.txt +titles pg3200.txt +titles, pg3200.txt, pg100.txt +titles. pg3200.txt, pg100.txt +titmarsh.'" pg3200.txt +titter pg3200.txt +tittered, pg3200.txt +tittle-tattle, pg3200.txt +tittle-tattling pg100.txt +titus pg100.txt +titus, pg100.txt +titus. pg100.txt +titus: pg100.txt +titus] pg100.txt +to! pg3200.txt, pg100.txt +to!" pg3200.txt +to!' pg100.txt +to's. pg100.txt +to't pg100.txt +to't! 
pg100.txt +to't, pg100.txt +to't. pg100.txt +to't: pg100.txt +to't; pg100.txt +to't? pg100.txt +to, pg31100.txt, pg3200.txt, pg100.txt +to- pg100.txt +to-- pg31100.txt, pg3200.txt +to--" pg31100.txt, pg3200.txt +to--' pg3200.txt +to--. pg3200.txt +to--["six pg3200.txt +to--but pg31100.txt +to--charlie pg3200.txt +to--injun pg3200.txt +to--like pg3200.txt +to--mrs. pg3200.txt +to--my pg31100.txt +to--nearer--" pg3200.txt +to--o pg3200.txt +to--oh, pg3200.txt +to--royalty?" pg3200.txt +to--some pg3200.txt +to--that's pg3200.txt +to--the pg3200.txt +to--there pg3200.txt +to--to pg3200.txt +to--to-- pg3200.txt +to--to--what pg3200.txt +to--you pg3200.txt +to-day pg31100.txt, pg3200.txt, pg100.txt +to-day! pg100.txt +to-day!" pg31100.txt +to-day's pg3200.txt +to-day, pg31100.txt, pg3200.txt, pg100.txt +to-day--except pg3200.txt +to-day--for pg3200.txt +to-day. pg3200.txt, pg100.txt +to-day." pg31100.txt, pg3200.txt +to-day.'" pg3200.txt +to-day: pg100.txt +to-day; pg31100.txt, pg3200.txt, pg100.txt +to-day? pg100.txt +to-day?" pg31100.txt, pg3200.txt +to-day?' pg100.txt +to-morrow pg31100.txt, pg3200.txt, pg100.txt +to-morrow! pg31100.txt, pg3200.txt, pg100.txt +to-morrow!" pg3200.txt +to-morrow's pg31100.txt +to-morrow's. pg31100.txt +to-morrow, pg31100.txt, pg3200.txt, pg100.txt +to-morrow- pg100.txt +to-morrow-- pg3200.txt +to-morrow. pg31100.txt, pg3200.txt, pg100.txt +to-morrow." pg31100.txt, pg3200.txt +to-morrow.' pg100.txt +to-morrow: pg31100.txt +to-morrow; pg31100.txt, pg3200.txt, pg100.txt +to-morrow? pg31100.txt, pg100.txt +to-morrow?" pg31100.txt +to-night pg31100.txt, pg3200.txt, pg100.txt +to-night! pg100.txt +to-night!" pg31100.txt, pg3200.txt +to-night, pg3200.txt, pg100.txt +to-night- pg100.txt +to-night--especially pg3200.txt +to-night. pg31100.txt, pg3200.txt, pg100.txt +to-night." pg31100.txt, pg3200.txt +to-night.' pg3200.txt +to-night: pg100.txt +to-night; pg100.txt +to-night? pg3200.txt, pg100.txt +to-night?" pg3200.txt +to-wit, pg3200.txt +to. pg31100.txt, pg3200.txt, pg100.txt +to." pg31100.txt, pg3200.txt +to.' pg3200.txt +to: pg3200.txt, pg100.txt +to; pg31100.txt, pg3200.txt, pg100.txt +to? pg31100.txt, pg3200.txt, pg100.txt +to?" pg31100.txt, pg3200.txt +to?' pg3200.txt +toad pg100.txt +toad! pg100.txt +toad, pg100.txt +toad. pg100.txt +toads pg3200.txt, pg100.txt +toads, pg100.txt +toads. pg100.txt +toadstools, pg3200.txt +toast pg3200.txt +toast, pg3200.txt +toasting-iron pg100.txt +toastmaster: pg3200.txt +toasts pg3200.txt +tobacco pg3200.txt +tobacco, pg3200.txt +tobacco--and pg3200.txt +tobacco. pg3200.txt +tobacco." pg3200.txt +tobacco; pg3200.txt +tobacker, pg3200.txt +tobacker-field. pg3200.txt +toby pg100.txt +toby! pg100.txt +toby, pg100.txt +toby- pg100.txt +toby. pg100.txt +toby? pg100.txt +today pg31100.txt, pg3200.txt, pg100.txt +today! pg3200.txt +today!" pg3200.txt +today, pg31100.txt, pg3200.txt, pg100.txt +today--ornithorhynchus pg3200.txt +today. pg3200.txt, pg100.txt +today." pg31100.txt, pg3200.txt +today.) pg3200.txt +today; pg100.txt +today? pg3200.txt, pg100.txt +today?" pg31100.txt +toddle pg3200.txt +todpole, pg100.txt +toe pg3200.txt, pg100.txt +toe, pg3200.txt, pg100.txt +toe. pg3200.txt, pg100.txt +toe." pg3200.txt +toe? pg100.txt +toes pg3200.txt, pg100.txt +toes, pg3200.txt +together pg31100.txt, pg3200.txt, pg100.txt +together! pg100.txt +together!" 
pg31100.txt, pg3200.txt +together), pg3200.txt +together, pg31100.txt, pg3200.txt, pg100.txt +together," pg31100.txt +together- pg100.txt +together-- pg3200.txt +together--" pg3200.txt +together--and pg3200.txt +together--go!" pg3200.txt +together--in pg3200.txt +together--it pg3200.txt +together. pg31100.txt, pg3200.txt, pg100.txt +together." pg31100.txt, pg3200.txt +together.--emma pg31100.txt +together._] pg31100.txt +together: pg3200.txt, pg100.txt +together; pg31100.txt, pg3200.txt, pg100.txt +together? pg31100.txt, pg100.txt +together?" pg31100.txt, pg3200.txt +together] pg100.txt +toggery pg3200.txt +togs pg3200.txt +togs." pg3200.txt +toil pg31100.txt, pg3200.txt +toil, pg3200.txt, pg100.txt +toil. pg3200.txt +toil; pg100.txt +toil? pg100.txt +toiled pg3200.txt +toiled: pg100.txt +toilers pg3200.txt +toilet pg3200.txt +toilet--no pg3200.txt +toilett pg31100.txt +toilette pg31100.txt +toilette. pg31100.txt +toiling pg3200.txt +toils pg31100.txt +toils. pg3200.txt +token pg31100.txt, pg3200.txt, pg100.txt +token, pg100.txt +token. pg100.txt +tokens pg100.txt +tokens, pg100.txt +tokens. pg100.txt +tol'able pg3200.txt +told pg31100.txt, pg3200.txt, pg100.txt +told!" pg31100.txt +told, pg3200.txt, pg100.txt +told--off pg3200.txt +told--oh, pg3200.txt +told. pg3200.txt, pg100.txt +told." pg31100.txt, pg3200.txt +told: pg100.txt +told; pg100.txt +told? pg100.txt +told?" pg3200.txt +tole pg3200.txt +tolerable pg31100.txt, pg3200.txt +tolerable! pg31100.txt +tolerable, pg100.txt +tolerable. pg31100.txt +tolerable." pg31100.txt +tolerably pg31100.txt, pg3200.txt +tolerably. pg31100.txt +tolerably." pg31100.txt +tolerate pg3200.txt +toleration pg31100.txt +toleration. pg3200.txt +toll pg3200.txt +toll, pg100.txt +toll. pg3200.txt +tollable pg3200.txt +tolled pg3200.txt +tolling pg3200.txt +tollmache?" pg3200.txt +tom pg31100.txt, pg3200.txt +tom! pg31100.txt, pg3200.txt, pg100.txt +tom!" pg3200.txt +tom!' pg100.txt +tom!--tell pg3200.txt +tom's pg3200.txt, pg100.txt +tom's.--m.t.] pg3200.txt +tom, pg31100.txt, pg3200.txt +tom--" pg3200.txt +tom-cat. pg3200.txt +tom-cats, pg3200.txt +tom. pg31100.txt, pg3200.txt, pg100.txt +tom." pg31100.txt, pg3200.txt +tom: pg3200.txt +tom; pg3200.txt +tom? pg3200.txt +tom?" pg3200.txt +tomahawk pg3200.txt +tomahawk. pg3200.txt +tomahawk; pg3200.txt +tomahawk?" pg3200.txt +tomahawk?' pg3200.txt +tomahawked pg3200.txt +tomahawks. pg3200.txt +tomato pg3200.txt +tomato, pg3200.txt +tomatoes. pg3200.txt +tomb pg3200.txt, pg100.txt +tomb! pg3200.txt, pg100.txt +tomb, pg3200.txt, pg100.txt +tomb. pg3200.txt, pg100.txt +tomb.] pg100.txt +tomb: pg3200.txt, pg100.txt +tomb; pg100.txt +tomb? pg100.txt +tomb] pg100.txt +tombs pg3200.txt +tombs! pg3200.txt +tombs, pg3200.txt, pg100.txt +tombs. pg3200.txt, pg100.txt +tombstone pg3200.txt +tombstone, pg3200.txt +tombstones pg3200.txt +tombstones. pg3200.txt +tomcats pg3200.txt +tomkins pg3200.txt +tommy pg3200.txt +tomorrow pg31100.txt, pg3200.txt, pg100.txt +tomorrow!" pg3200.txt +tomorrow, pg31100.txt, pg3200.txt, pg100.txt +tomorrow. pg31100.txt, pg3200.txt, pg100.txt +tomorrow." pg31100.txt, pg3200.txt +tomorrow? pg100.txt +tomorrow?" pg31100.txt, pg3200.txt +ton pg3200.txt +ton! pg3200.txt +ton, pg3200.txt, pg100.txt +ton--that pg3200.txt +ton. pg3200.txt +ton." pg3200.txt +ton; pg3200.txt +tonawanda pg3200.txt +tone pg31100.txt, pg3200.txt +tone)--she pg31100.txt +tone, pg31100.txt, pg3200.txt +tone-- pg31100.txt, pg3200.txt +tone--"my pg31100.txt +tone. 
pg31100.txt, pg3200.txt +tone: pg3200.txt +tone; pg3200.txt +toned pg3200.txt +tones pg31100.txt, pg3200.txt +tones, pg3200.txt +tones: pg3200.txt +tongariro pg3200.txt +tongs pg100.txt +tongs. pg3200.txt +tongue pg3200.txt, pg100.txt +tongue! pg100.txt +tongue!" pg31100.txt +tongue's pg100.txt +tongue) pg100.txt +tongue, pg31100.txt, pg3200.txt, pg100.txt +tongue- pg100.txt +tongue-flame, pg3200.txt +tongue-tied pg3200.txt +tongue-tied. pg3200.txt +tongue. pg31100.txt, pg3200.txt, pg100.txt +tongue." pg31100.txt, pg3200.txt +tongue.' pg100.txt +tongue: pg3200.txt, pg100.txt +tongue; pg31100.txt, pg3200.txt, pg100.txt +tongue? pg3200.txt, pg100.txt +tongueless pg100.txt +tongues pg3200.txt, pg100.txt +tongues! pg100.txt +tongues, pg3200.txt, pg100.txt +tongues- pg100.txt +tongues. pg3200.txt, pg100.txt +tongues; pg3200.txt, pg100.txt +tongues? pg100.txt +tonics pg3200.txt +tonight pg3200.txt, pg100.txt +tonight! pg100.txt +tonight!" pg31100.txt +tonight, pg3200.txt, pg100.txt +tonight. pg31100.txt, pg3200.txt, pg100.txt +tonight." pg3200.txt +tonnage--you pg3200.txt +tons pg3200.txt +tons--rooms pg3200.txt +tons.) pg3200.txt +tonsilitis pg3200.txt +tonty, pg3200.txt +too pg31100.txt, pg3200.txt, pg100.txt +too! pg31100.txt, pg3200.txt, pg100.txt +too!" pg3200.txt +too!"--and pg3200.txt +too!--i pg31100.txt +too" pg3200.txt +too) pg31100.txt, pg3200.txt +too), pg100.txt +too, pg31100.txt, pg3200.txt, pg100.txt +too,) pg3200.txt +too- pg100.txt +too--"saying, pg3200.txt +too--_kings!"_ pg3200.txt +too--a pg3200.txt +too--and pg31100.txt, pg3200.txt +too--apples. pg3200.txt +too--but pg3200.txt +too--by pg31100.txt +too--cordially." pg3200.txt +too--had pg3200.txt +too--he pg3200.txt +too--if pg3200.txt +too--knowing pg31100.txt +too--loafing pg3200.txt +too--often pg3200.txt +too--seemed pg3200.txt +too--the pg3200.txt +too--there pg3200.txt +too--therefore pg3200.txt +too--tickled pg3200.txt +too--whereas, pg3200.txt +too--which pg31100.txt +too. pg31100.txt, pg3200.txt, pg100.txt +too." pg31100.txt, pg3200.txt +too.' pg3200.txt +too..... pg3200.txt +too: pg3200.txt, pg100.txt +too; pg31100.txt, pg3200.txt, pg100.txt +too? pg31100.txt, pg3200.txt, pg100.txt +too?" pg31100.txt, pg3200.txt +too?' pg3200.txt +took pg31100.txt, pg3200.txt, pg100.txt +took'st pg100.txt +took't, pg100.txt +took, pg3200.txt, pg100.txt +took. pg3200.txt, pg100.txt +tookaram pg3200.txt +tookaram, pg3200.txt +tookaram. pg3200.txt +tookaram." pg3200.txt +tool pg3200.txt +tools pg3200.txt, pg100.txt +tools. pg3200.txt +toot, pg3200.txt +tooth pg31100.txt, pg3200.txt, pg100.txt +tooth! pg100.txt +tooth, pg31100.txt, pg100.txt +tooth-brush, pg3200.txt +tooth-wash pg3200.txt +tooth. pg3200.txt, pg100.txt +tooth; pg100.txt +tooth?" pg3200.txt +toothache pg3200.txt +toothache," pg3200.txt +toothache,' pg3200.txt +toothache. pg100.txt +toothache? pg100.txt +toothbrushes, pg3200.txt +toothpick-case pg31100.txt +toothpicker pg100.txt +tooting pg3200.txt +tooting. pg3200.txt +toowoomba pg3200.txt +top pg31100.txt, pg3200.txt, pg100.txt +top, pg3200.txt, pg100.txt +top--but pg3200.txt +top--just pg3200.txt +top-boots, pg3200.txt +top-buggy pg3200.txt +top-dressing pg3200.txt +top-full pg100.txt +top-heavy pg3200.txt +top. pg31100.txt, pg3200.txt, pg100.txt +top." pg3200.txt +top; pg3200.txt, pg100.txt +top?" pg3200.txt +topas! pg100.txt +topas, pg100.txt +topas. pg100.txt +topic pg31100.txt, pg3200.txt +topic, pg31100.txt, pg3200.txt +topic. pg3200.txt +topics pg31100.txt +topics, pg31100.txt, pg3200.txt +topics. 
pg3200.txt +topmast, pg100.txt +topmost pg3200.txt +topp'd? pg100.txt +topped pg3200.txt +topping pg3200.txt +topple pg3200.txt +topples pg100.txt +tops pg3200.txt, pg100.txt +tops, pg3200.txt, pg100.txt +tops- pg100.txt +tops. pg3200.txt, pg100.txt +torch pg3200.txt, pg100.txt +torch!" pg3200.txt +torch-baskets, pg3200.txt +torch-bearer. pg100.txt +torch-bearers. pg100.txt +torch-light pg3200.txt +torch. pg3200.txt, pg100.txt +torch; pg3200.txt +torch]. pg100.txt +torchbearer pg100.txt +torchbearers. pg100.txt +torches pg3200.txt, pg100.txt +torches, pg3200.txt +torches. pg3200.txt, pg100.txt +torchlight pg3200.txt +tore pg3200.txt, pg100.txt +toreckly, pg3200.txt +torment pg100.txt +torment!' pg100.txt +torment, pg31100.txt, pg100.txt +torment. pg100.txt +torment." pg31100.txt +torment? pg100.txt +tormented pg3200.txt +torments pg3200.txt +torments, pg100.txt +torn pg31100.txt, pg3200.txt +torn, pg3200.txt, pg100.txt +torn. pg100.txt +torn." pg31100.txt +tornado pg3200.txt +torpedo pg3200.txt +torpedo, pg3200.txt +torpedoes?" pg3200.txt +torpid pg3200.txt +torpid; pg3200.txt +torrent pg3200.txt +torrent, pg3200.txt +torrent. pg3200.txt +torrents pg3200.txt +torrents. pg3200.txt +torrid, pg3200.txt +tortoise pg3200.txt +tortoise-shell pg3200.txt +tortoises pg3200.txt +torts pg3200.txt +tortur'd pg100.txt +torture pg3200.txt, pg100.txt +torture, pg3200.txt, pg100.txt +torture. pg3200.txt, pg100.txt +tortured pg31100.txt, pg3200.txt +tortured; pg100.txt +torturer pg100.txt +tortures pg3200.txt +tortures." pg3200.txt +torturing pg3200.txt +torturing, pg3200.txt +toryne. pg100.txt +toss pg3200.txt +toss'd pg100.txt +toss, pg3200.txt +tossed pg3200.txt +tossing pg3200.txt +tot pg3200.txt +tot; pg100.txt +total pg31100.txt, pg3200.txt +total, pg3200.txt +total,.......................320,431 pg3200.txt +total--19 pg3200.txt +total. pg100.txt +totally pg31100.txt, pg3200.txt +totally. pg100.txt +totally; pg3200.txt +toted pg3200.txt +toting?" pg3200.txt +totter pg3200.txt +tottered pg3200.txt +tottering, pg3200.txt +totters. pg100.txt +toucey, pg3200.txt +touch pg31100.txt, pg3200.txt, pg100.txt +touch! pg100.txt +touch!" pg3200.txt +touch'd pg100.txt +touch'd, pg100.txt +touch'd. pg100.txt +touch'd? pg100.txt +touch, pg3200.txt, pg100.txt +touch- pg100.txt +touch-- pg3200.txt +touch-me-nots, pg3200.txt +touch. pg31100.txt, pg3200.txt +touch." pg3200.txt +touch; pg100.txt +touch?" pg3200.txt +touched pg31100.txt, pg3200.txt +touched, pg3200.txt +touched--she pg31100.txt +touched--this pg3200.txt +touched. pg3200.txt +touched? pg3200.txt +touches pg31100.txt, pg3200.txt, pg100.txt +touches, pg3200.txt, pg100.txt +touches. pg3200.txt +touches." pg3200.txt +toucheth pg100.txt +touching pg31100.txt, pg3200.txt, pg100.txt +touching, pg100.txt +touching? pg3200.txt +touchingest pg3200.txt +touchingly, pg3200.txt +touchstone pg100.txt +touchstone? pg100.txt +tough pg3200.txt +tough! pg100.txt +tough, pg31100.txt, pg3200.txt +tough. pg100.txt +toughest pg3200.txt +toughness pg3200.txt +toughness; pg100.txt +toughs. pg3200.txt +toujours pg3200.txt +toul pg3200.txt +tour pg3200.txt +tour, pg3200.txt +tour. pg3200.txt +tourbillon pg3200.txt +tourelles pg3200.txt +tourelles, pg3200.txt +tourelles. pg3200.txt +tourelles." pg3200.txt +tourist pg3200.txt +tourist's. pg3200.txt +tourist. pg3200.txt +tourists pg3200.txt +tourists, pg3200.txt +tourists--swarms pg3200.txt +tourists. pg3200.txt +tournament pg3200.txt +tournament, pg3200.txt +tournament--excursion pg3200.txt +tournaments, pg100.txt +tournanche... 
pg3200.txt +tours pg3200.txt, pg100.txt +tours, pg100.txt +tout pg3200.txt +touze pg100.txt +tow pg3200.txt +tow'r, pg100.txt +tow'r. pg100.txt +tow'rs pg100.txt +tow-head." pg3200.txt +tow-head?" pg3200.txt +tow-linen pg3200.txt +tow-linens pg3200.txt +toward pg31100.txt, pg3200.txt, pg100.txt +toward! pg100.txt +toward!" pg3200.txt +toward. pg100.txt +toward; pg100.txt +toward? pg100.txt +towards pg31100.txt, pg3200.txt, pg100.txt +towards. pg100.txt +towed pg3200.txt +towed. pg3200.txt +towel pg3200.txt +towel, pg3200.txt +towels pg3200.txt +tower pg3200.txt, pg100.txt +tower, pg3200.txt, pg100.txt +tower- pg100.txt +tower-walls pg100.txt +tower. pg3200.txt, pg100.txt +tower." pg3200.txt +tower; pg3200.txt, pg100.txt +tower? pg100.txt +tower?" pg3200.txt +towered pg3200.txt +towering pg3200.txt +towers pg3200.txt +towers, pg3200.txt, pg100.txt +towers. pg3200.txt +towhead pg3200.txt +towhead, pg3200.txt +towheads, pg3200.txt +town pg31100.txt, pg3200.txt, pg100.txt +town! pg100.txt +town, pg31100.txt, pg3200.txt, pg100.txt +town--" pg3200.txt +town--"tom pg3200.txt +town--boer pg3200.txt +town--calais. pg3200.txt +town--hartford--made pg3200.txt +town--mere pg3200.txt +town--no pg3200.txt +town--that, pg31100.txt +town--the pg3200.txt +town--then pg3200.txt +town--thought pg3200.txt +town. pg31100.txt, pg3200.txt, pg100.txt +town." pg31100.txt, pg3200.txt +town.' pg3200.txt +town; pg31100.txt, pg3200.txt, pg100.txt +town;--there pg31100.txt +town? pg3200.txt, pg100.txt +town?" pg31100.txt, pg3200.txt +town?' pg3200.txt +town] pg100.txt +towns pg3200.txt, pg100.txt +towns, pg3200.txt, pg100.txt +towns. pg3200.txt, pg100.txt +towns; pg3200.txt, pg100.txt +towns? pg100.txt +townsend's pg3200.txt +township pg3200.txt +township. pg100.txt +toy pg3200.txt, pg100.txt +toy! pg100.txt +toy, pg100.txt +toy. pg3200.txt +toys pg31100.txt, pg3200.txt, pg100.txt +toys! pg100.txt +toys. pg100.txt +toys; pg100.txt +toys? pg100.txt +trace pg31100.txt, pg3200.txt, pg100.txt +trace. pg3200.txt +traceable pg3200.txt +traceable, pg3200.txt +traced pg31100.txt, pg3200.txt +traced. pg3200.txt +tracery pg3200.txt +traces pg3200.txt +traces. pg3200.txt +tracing pg3200.txt +track pg3200.txt, pg100.txt +track, pg3200.txt +track. pg3200.txt +track." pg3200.txt +track.] pg3200.txt +track; pg3200.txt +track? pg3200.txt +track?" pg3200.txt +tracked.' pg3200.txt +tracker pg3200.txt +trackless pg3200.txt +tracks pg3200.txt +tracks!" pg3200.txt +tracks, pg3200.txt +tracks--a pg3200.txt +tracks. pg3200.txt +tracks." pg3200.txt +tracks.' pg3200.txt +tracks; pg3200.txt +tract pg3200.txt +tract, pg3200.txt +tract-distributor pg3200.txt +tract. pg3200.txt +tractable pg3200.txt +tractable. pg100.txt +tracts pg3200.txt +tracy pg3200.txt +tracy's pg3200.txt +tracy, pg3200.txt +tracy. pg3200.txt +tracy." pg3200.txt +tracy: pg3200.txt +tracy; pg3200.txt +tracy?" pg3200.txt +trade pg31100.txt, pg3200.txt +trade! pg100.txt +trade!" pg3200.txt +trade, pg31100.txt, pg3200.txt, pg100.txt +trade--and pg3200.txt +trade-mark pg3200.txt +trade-mark, pg3200.txt +trade-phrasings pg3200.txt +trade-profit pg3200.txt +trade-union pg3200.txt +trade-union. pg3200.txt +trade-union." pg3200.txt +trade. pg31100.txt, pg3200.txt, pg100.txt +trade." pg3200.txt +trade: pg3200.txt +trade; pg3200.txt, pg100.txt +trade? pg3200.txt, pg100.txt +trade?" pg3200.txt +traded pg3200.txt +trademark, pg31100.txt, pg3200.txt, pg100.txt +trademark--that pg3200.txt +trademark. 
pg31100.txt, pg3200.txt, pg100.txt +trader pg3200.txt +trader's pg3200.txt +trader, pg3200.txt +traders pg3200.txt +trades"--mining pg3200.txt +trades, pg100.txt +trades-union pg3200.txt +trades. pg3200.txt +tradesman's pg3200.txt +tradesmen pg3200.txt +trading pg3200.txt +trading-scow, pg3200.txt +tradition pg3200.txt +tradition, pg3200.txt +tradition. pg3200.txt +traditional pg31100.txt, pg3200.txt +traditional. pg100.txt +traditions pg3200.txt +traditions--"women pg3200.txt +traditions. pg3200.txt +traditions: pg3200.txt +traditions; pg3200.txt +traduced pg3200.txt +traducement, pg100.txt +traducer pg3200.txt +traffic pg3200.txt, pg100.txt +traffic. pg3200.txt +traffickers, pg100.txt +tragedian; pg100.txt +tragedians pg3200.txt +tragedians, pg3200.txt +tragedies pg3200.txt +tragedies. pg3200.txt +tragedies? pg100.txt +tragedy pg31100.txt, pg3200.txt, pg100.txt +tragedy! pg100.txt +tragedy, pg3200.txt, pg100.txt +tragedy-trap. pg3200.txt +tragedy. pg3200.txt, pg100.txt +tragedy." pg3200.txt +tragedy.' pg3200.txt +tragedy; pg100.txt +tragic pg3200.txt +tragic? pg3200.txt +tragical. pg100.txt +tragical? pg100.txt +tragoedie!" pg3200.txt +trail pg3200.txt +trail, pg3200.txt, pg100.txt +trail. pg3200.txt +trailing pg3200.txt +trails pg3200.txt +train pg3200.txt, pg100.txt +train! pg3200.txt +train'd pg100.txt +train'd, pg100.txt +train, pg3200.txt, pg100.txt +train," pg3200.txt +train--" pg31100.txt +train--and pg3200.txt +train-boy; pg3200.txt +train-oil--' pg3200.txt +train-time! pg3200.txt +train. pg31100.txt, pg3200.txt, pg100.txt +train." pg3200.txt +train.' pg3200.txt +train: pg100.txt +train; pg3200.txt, pg100.txt +train? pg100.txt +train] pg100.txt +trained pg31100.txt, pg3200.txt +trained--" pg3200.txt +trained? pg3200.txt +training pg3200.txt, pg100.txt +training, pg3200.txt +training--training pg3200.txt +training. pg3200.txt +training; pg3200.txt +trains pg3200.txt, pg100.txt +trains, pg3200.txt +trains. pg3200.txt +trains; pg3200.txt +trait pg31100.txt, pg3200.txt +trait." pg3200.txt +traitor pg3200.txt, pg100.txt +traitor! pg100.txt +traitor!" pg3200.txt +traitor, pg100.txt +traitor. pg3200.txt, pg100.txt +traitor; pg100.txt +traitor? pg100.txt +traitorous pg100.txt +traitorously. pg100.txt +traitors pg3200.txt, pg100.txt +traitors! pg100.txt +traitors, pg100.txt +traitors. pg100.txt +traitors; pg100.txt +traitors? pg100.txt +traits pg31100.txt, pg3200.txt +tramp pg3200.txt +tramp! pg3200.txt +tramp, pg3200.txt +tramp--" pg3200.txt +tramp. pg3200.txt +tramp; pg3200.txt +tramped pg3200.txt +tramping pg3200.txt +tramping. pg3200.txt +trample pg3200.txt +trampled pg3200.txt +trampled-on, pg3200.txt +trampling pg3200.txt +tramplings, pg3200.txt +tramps pg3200.txt +tramps, pg3200.txt +tramps. pg3200.txt +tranc'd. pg100.txt +trance pg3200.txt +trance, pg3200.txt +trance--oh, pg3200.txt +trance. pg3200.txt, pg100.txt +tranced pg3200.txt +tranio pg100.txt +tranio! pg100.txt +tranio, pg100.txt +tranio. pg100.txt +tranio; pg100.txt +tranio? pg100.txt +tranquil pg31100.txt, pg3200.txt +tranquil! pg3200.txt +tranquil, pg3200.txt +tranquil--give pg3200.txt +tranquil. pg31100.txt +tranquilest pg3200.txt +tranquility pg3200.txt +tranquility. pg3200.txt +tranquilizing. pg3200.txt +tranquillised pg31100.txt +tranquillity pg31100.txt, pg3200.txt +tranquillity, pg31100.txt +tranquillity. pg31100.txt, pg3200.txt +tranquillity." pg3200.txt +tranquilly pg3200.txt +tranquilly, pg3200.txt +tranquilly. pg3200.txt +tranquilly: pg3200.txt +transaction pg31100.txt +transaction, pg3200.txt +transaction. 
pg3200.txt +transactions." pg31100.txt +transcended pg3200.txt +transcendence; pg100.txt +transcending pg3200.txt +transcends. pg100.txt +transcribed pg31100.txt +transcribing pg31100.txt +transfer pg31100.txt, pg3200.txt +transferable, pg3200.txt +transferred pg3200.txt +transferred. pg100.txt +transfigured pg3200.txt +transfigured, pg3200.txt +transfixed! pg3200.txt +transform pg3200.txt, pg100.txt +transform'd pg100.txt +transformation pg3200.txt +transformation! pg100.txt +transformation, pg100.txt +transformation. pg3200.txt +transformations pg100.txt +transformed pg3200.txt +transformed, pg100.txt +transforming pg3200.txt +transgress'd pg100.txt +transgress'd. pg100.txt +transgresses pg100.txt +transgression pg100.txt +transgression, pg100.txt +transgression. pg100.txt +transgressions, pg31100.txt +transgressors pg3200.txt +transient pg3200.txt +transient, pg3200.txt +transient. pg3200.txt +transit pg3200.txt +transit. pg3200.txt +transition pg31100.txt, pg3200.txt +transition, pg3200.txt +translatable pg3200.txt +translate pg3200.txt +translate! pg100.txt +translate): pg3200.txt +translate, pg3200.txt +translate. pg3200.txt +translate: pg3200.txt +translated pg3200.txt, pg100.txt +translated. pg100.txt +translating pg3200.txt +translation pg3200.txt +translation, pg3200.txt +translation. pg3200.txt +translation." pg3200.txt +translation] pg3200.txt +translations pg3200.txt +translator pg3200.txt +translator. pg3200.txt +translators pg3200.txt +transmigrates. pg100.txt +transmissible pg3200.txt +transmissible. pg3200.txt +transmitted pg31100.txt, pg3200.txt +transmitted. pg3200.txt +transmitter, pg3200.txt +transom!" pg3200.txt +transparent pg3200.txt +transparently pg3200.txt +transpired pg3200.txt +transpired. pg31100.txt, pg3200.txt +transpirer. pg3200.txt +transpires pg3200.txt +transplantation pg31100.txt +transport pg31100.txt, pg3200.txt +transportation pg3200.txt +transportation, pg3200.txt +transportation; pg3200.txt +transported pg100.txt +transported. pg100.txt +transporting pg3200.txt +transports pg31100.txt, pg3200.txt +transpose. pg100.txt +transpositions pg3200.txt +transvaal, pg3200.txt +transvaal. pg3200.txt +transvaal." pg3200.txt +trap pg3200.txt +trap, pg3200.txt +trap. pg3200.txt +trap." pg3200.txt +trap: pg3200.txt +trapp'd pg100.txt +trapp'd, pg100.txt +trapper, pg3200.txt +trappings pg3200.txt +trappings. pg100.txt +traps pg3200.txt +traps, pg3200.txt +traps. pg100.txt +trapse pg3200.txt +trash pg3200.txt, pg100.txt +trash. pg100.txt +traum pg3200.txt +traum. pg3200.txt +travail pg100.txt +travel pg31100.txt, pg3200.txt, pg100.txt +travel, pg31100.txt, pg3200.txt, pg100.txt +travel--but pg31100.txt +travel--in pg3200.txt +travel-book, pg3200.txt +travel. pg31100.txt, pg3200.txt, pg100.txt +travel? pg100.txt +travel?" pg31100.txt +traveled pg3200.txt +traveled, pg3200.txt +traveled. pg3200.txt +traveled; pg3200.txt +traveler pg3200.txt +traveler, pg3200.txt +traveler--a pg3200.txt +traveler. pg3200.txt +traveler; pg3200.txt +travelers pg3200.txt +travelers. pg3200.txt +travelers." pg3200.txt +traveling pg3200.txt +traveling, pg3200.txt +traveling. pg3200.txt +travelled pg31100.txt, pg3200.txt +travelled, pg3200.txt +traveller pg3200.txt +traveller, pg31100.txt, pg100.txt +traveller. pg100.txt +traveller; pg100.txt +travellers. pg31100.txt, pg100.txt +travellers." pg31100.txt +travelling pg31100.txt +travelling. pg3200.txt +travels pg3200.txt +travels, pg3200.txt +travels. 
pg3200.txt +travers pg100.txt +traverse pg3200.txt +traverse; pg100.txt +traversed pg3200.txt +traversed. pg3200.txt +traverses pg3200.txt +traversing pg3200.txt +travesty pg3200.txt +travesty, pg3200.txt +trays pg3200.txt +treacheries pg3200.txt +treacherous pg3200.txt +treacherous, pg3200.txt, pg100.txt +treacherous. pg100.txt +treacherous; pg3200.txt +treachery pg31100.txt, pg3200.txt, pg100.txt +treachery! pg100.txt +treachery!" pg3200.txt +treachery, pg31100.txt, pg100.txt +treachery--" pg3200.txt +treachery. pg3200.txt, pg100.txt +treachery? pg100.txt +tread pg31100.txt, pg3200.txt, pg100.txt +tread! pg100.txt +tread, pg100.txt +tread- pg100.txt +tread. pg31100.txt, pg100.txt +treading pg3200.txt +treads pg100.txt +treason pg100.txt +treason! pg100.txt +treason, pg3200.txt, pg100.txt +treason. pg3200.txt, pg100.txt +treason: pg100.txt +treason; pg100.txt +treasonable pg100.txt +treasonous. pg100.txt +treasons pg100.txt +treasons, pg100.txt +treasons: pg100.txt +treasure pg31100.txt, pg3200.txt, pg100.txt +treasure! pg100.txt +treasure, pg31100.txt, pg3200.txt, pg100.txt +treasure-chest pg3200.txt +treasure-hill pg3200.txt +treasure-house! pg100.txt +treasure-hunting. pg3200.txt +treasure. pg3200.txt, pg100.txt +treasure." pg3200.txt +treasure.' pg31100.txt +treasure; pg31100.txt +treasure? pg100.txt +treasure?" pg3200.txt +treasured pg3200.txt +treasurer pg3200.txt +treasurer's pg3200.txt +treasurer. pg3200.txt +treasurer; pg3200.txt +treasures pg31100.txt, pg3200.txt +treasures, pg3200.txt +treasures. pg3200.txt +treasures." pg3200.txt +treasures; pg31100.txt +treasures] pg3200.txt +treasuries. pg100.txt +treasury pg3200.txt, pg100.txt +treasury. pg3200.txt, pg100.txt +treasury." pg3200.txt +treasury; pg3200.txt +treasury?" pg3200.txt +treasury?' pg3200.txt +treat pg31100.txt, pg3200.txt +treat, pg3200.txt +treat. pg3200.txt +treat." pg31100.txt +treated pg31100.txt, pg3200.txt +treated--if pg3200.txt +treated. pg3200.txt +treated; pg31100.txt +treating pg31100.txt +treatise pg3200.txt +treatise. pg100.txt +treatment pg31100.txt, pg3200.txt +treatment, pg31100.txt, pg3200.txt +treatment. pg31100.txt, pg3200.txt +treatment." pg31100.txt +treatment? pg3200.txt +treats pg31100.txt, pg3200.txt +treaty pg3200.txt, pg100.txt +treaty, pg3200.txt +treaty. pg3200.txt +treaty." pg3200.txt +treble pg3200.txt +trebonius pg3200.txt +trebonius, pg100.txt +trebonius. pg100.txt +tree pg31100.txt, pg3200.txt, pg100.txt +tree, pg31100.txt, pg3200.txt, pg100.txt +tree--" pg3200.txt +tree----" pg3200.txt +tree--she pg3200.txt +tree--warning pg3200.txt +tree-top pg3200.txt +tree-tops pg3200.txt +tree. pg31100.txt, pg3200.txt, pg100.txt +tree." pg31100.txt, pg3200.txt +tree.' pg3200.txt +tree.'" pg3200.txt +tree; pg3200.txt +tree? pg100.txt +tree?" pg3200.txt +tree] pg100.txt +treed pg3200.txt +treeless pg3200.txt +treeless, pg3200.txt +trees pg31100.txt, pg3200.txt, pg100.txt +trees! pg100.txt +trees!" pg3200.txt +trees, pg31100.txt, pg3200.txt, pg100.txt +trees,--a pg3200.txt +trees--and pg3200.txt +trees-tropical pg3200.txt +trees. pg31100.txt, pg3200.txt, pg100.txt +trees." pg3200.txt +trees; pg100.txt +trees? pg100.txt +treffen pg3200.txt +trellis-work pg3200.txt +tremble pg31100.txt, pg3200.txt, pg100.txt +tremble!" pg3200.txt +tremble, pg3200.txt, pg100.txt +tremble- pg100.txt +tremble. pg31100.txt, pg3200.txt, pg100.txt +trembled pg31100.txt, pg3200.txt +trembled, pg3200.txt +trembled. pg3200.txt +trembled.] pg3200.txt +trembles. 
pg100.txt +trembling pg31100.txt, pg3200.txt +trembling, pg31100.txt, pg3200.txt +trembling. pg31100.txt, pg3200.txt, pg100.txt +tremblingly; pg3200.txt +tremendous pg3200.txt +tremendously pg3200.txt +tremens. pg3200.txt +tremor pg31100.txt, pg3200.txt +tremouille pg3200.txt +tremouille. pg3200.txt +trempeleau pg3200.txt +trempling pg100.txt +tremulous pg31100.txt +tremulousness pg3200.txt +trench pg100.txt +trencher pg100.txt +trencher! pg100.txt +trencher. pg100.txt +trencher; pg100.txt +trenches pg3200.txt +trenches, pg3200.txt +trenches. pg100.txt +trenches? pg100.txt +trend, pg3200.txt +trend--but pg3200.txt +trent. pg100.txt +trent." pg31100.txt +trenton, pg3200.txt +trepidation. pg31100.txt +trepidation: pg3200.txt +trespass pg31100.txt, pg3200.txt, pg100.txt +trespass! pg3200.txt +trespass. pg100.txt +trespass." pg3200.txt +trespassed pg3200.txt +trespasser, pg3200.txt +trespasses pg100.txt +trespasses, pg100.txt +trespassing pg31100.txt +trestle, pg3200.txt +trestle-work pg3200.txt +trestle-work!" pg3200.txt +trial pg31100.txt, pg3200.txt, pg100.txt +trial! pg100.txt +trial, pg3200.txt, pg100.txt +trial-day! pg100.txt +trial. pg31100.txt, pg3200.txt, pg100.txt +trial." pg3200.txt +trial: pg3200.txt +trial; pg100.txt +trial? pg3200.txt +trial?" pg3200.txt +trials pg3200.txt +trials, pg31100.txt, pg3200.txt +trials. pg3200.txt +trials; pg3200.txt +triangle, pg3200.txt +triangle. pg3200.txt +trianon pg3200.txt +trib, pg100.txt +tribbilation." pg3200.txt +tribe pg3200.txt, pg100.txt +tribe, pg3200.txt, pg100.txt +tribe--or pg3200.txt +tribe. pg3200.txt +tribe; pg3200.txt, pg100.txt +tribes pg3200.txt +tribes, pg3200.txt +tribes. pg100.txt +tribes; pg3200.txt +tribes? pg100.txt +tribulation pg100.txt +tribunal pg3200.txt +tribunal. pg3200.txt +tribunals." pg3200.txt +tribune pg3200.txt, pg100.txt +tribune! pg100.txt +tribune" pg3200.txt +tribune' pg3200.txt +tribune, pg3200.txt +tribune. pg3200.txt, pg100.txt +tribune; pg3200.txt +tribunes pg100.txt +tribunes, pg100.txt +tribunes- pg100.txt +tribunes. pg100.txt +tribunes; pg100.txt +tributaries pg100.txt +tributary pg3200.txt +tributary, pg100.txt +tribute pg31100.txt, pg3200.txt +tribute, pg100.txt +tribute. pg3200.txt, pg100.txt +tribute." pg3200.txt +tribute; pg100.txt +tributes pg3200.txt +tributes. pg3200.txt +trice pg100.txt +trice, pg100.txt +trice. pg31100.txt, pg3200.txt, pg100.txt +triced pg3200.txt +trick pg31100.txt, pg3200.txt, pg100.txt +trick'd pg100.txt +trick, pg31100.txt +trick. pg3200.txt, pg100.txt +trick; pg3200.txt +trick? pg100.txt +tricked pg31100.txt, pg3200.txt +trickeries pg3200.txt +trickeries, pg3200.txt +trickle pg3200.txt +trickling pg3200.txt +tricks pg31100.txt, pg3200.txt, pg100.txt +tricks! pg100.txt +tricks" pg31100.txt +tricks, pg3200.txt, pg100.txt +tricks. pg3200.txt, pg100.txt +tricks." pg31100.txt +tricks._"] pg31100.txt +tricks._] pg31100.txt +tricks? pg100.txt +trickster pg3200.txt +trident, pg100.txt +triebe-- pg3200.txt +tried pg31100.txt, pg3200.txt, pg100.txt +tried! pg3200.txt +tried, pg3200.txt, pg100.txt +tried--but pg31100.txt +tried. pg3200.txt, pg100.txt +tried." pg3200.txt +tried; pg3200.txt +tried?" pg3200.txt +tries pg31100.txt, pg3200.txt +trifle pg31100.txt, pg3200.txt, pg100.txt +trifle! pg3200.txt +trifle, pg31100.txt, pg3200.txt, pg100.txt +trifle. pg3200.txt, pg100.txt +trifle." pg31100.txt, pg3200.txt +trifle; pg31100.txt, pg3200.txt, pg100.txt +trifle? 
pg31100.txt +trifled pg31100.txt, pg3200.txt +trifler pg3200.txt +trifles pg3200.txt, pg100.txt +trifles), pg3200.txt +trifles, pg3200.txt +trifles. pg3200.txt, pg100.txt +trifles? pg3200.txt +trifling pg31100.txt, pg3200.txt +trifling, pg31100.txt +trifling--a pg3200.txt +trifling. pg3200.txt +trifling." pg31100.txt +trifling; pg31100.txt, pg100.txt +trigger pg3200.txt +trill'd pg100.txt +trills pg3200.txt +trim pg3200.txt, pg100.txt +trim! pg100.txt +trim) pg100.txt +trim, pg100.txt +trim; pg100.txt +trimmed pg3200.txt +trimmed. pg3200.txt +trimming pg3200.txt +trimming? pg100.txt +trimmings. pg3200.txt +trimmings." pg3200.txt +trimness pg3200.txt +trinculo pg100.txt +trinculo. pg100.txt +trinculo? pg100.txt +trinculos? pg100.txt +trinity pg3200.txt +trinity: pg3200.txt +trinity; pg3200.txt +trinkets pg3200.txt +trio pg3200.txt +trip pg31100.txt, pg3200.txt +trip!" pg3200.txt +trip" pg3200.txt +trip), pg3200.txt +trip, pg3200.txt +trip. pg3200.txt +trip.' pg3200.txt +trip; pg3200.txt +trip?" pg3200.txt +trip?' pg3200.txt +tripe pg3200.txt +triplets, pg3200.txt +tripolis. pg100.txt +tripped pg3200.txt +tripping pg100.txt +tripping." pg3200.txt +trippingly. pg100.txt +trips pg3200.txt +trite, pg3200.txt +triumph pg31100.txt, pg3200.txt, pg100.txt +triumph, pg31100.txt, pg3200.txt +triumph--and pg31100.txt +triumph--there'll pg3200.txt +triumph-day. pg100.txt +triumph. pg31100.txt, pg3200.txt, pg100.txt +triumph." pg31100.txt +triumph; pg3200.txt +triumph? pg3200.txt, pg100.txt +triumphal pg3200.txt +triumphant pg31100.txt, pg3200.txt +triumphant, pg3200.txt +triumphantly, pg100.txt +triumphers pg100.txt +triumphing pg31100.txt +triumphing. pg100.txt +triumvirate pg100.txt +triumvirs pg100.txt +trivet, pg3200.txt +trivial pg31100.txt, pg3200.txt +trivial, pg100.txt +trivial. pg3200.txt +trivialities pg3200.txt +trivialities, pg3200.txt +trod pg3200.txt, pg100.txt +trod, pg3200.txt +trod; pg3200.txt, pg100.txt +trodden pg100.txt +troglodytes; pg3200.txt +troien, pg100.txt +troilus pg100.txt +troilus! pg100.txt +troilus' pg100.txt +troilus, pg100.txt +troilus- pg100.txt +troilus. pg100.txt +troilus; pg100.txt +troilus? pg100.txt +trojan pg3200.txt +trojans pg3200.txt +trolley pg3200.txt +trolley, pg3200.txt +trollop pg3200.txt +trollop's pg3200.txt +trollop, pg3200.txt +trollop. pg3200.txt +trollop." pg3200.txt +tromped pg3200.txt +tromperies. pg100.txt +trompled pg3200.txt +troop pg31100.txt, pg3200.txt, pg100.txt +troop, pg100.txt +troop. pg100.txt +troop; pg100.txt +troop? pg100.txt +trooping pg3200.txt +troops pg3200.txt, pg100.txt +troops, pg3200.txt, pg100.txt +troops. pg3200.txt, pg100.txt +troops." pg3200.txt +troops; pg100.txt +troops? pg3200.txt +trophies pg3200.txt +trophies. pg31100.txt, pg3200.txt +tropic pg3200.txt +tropical pg3200.txt +tropical, pg3200.txt +tropics pg3200.txt +tropics!" pg3200.txt +tropics, pg3200.txt +tropics. pg3200.txt +tropics." pg3200.txt +trot pg3200.txt, pg100.txt +trot, pg3200.txt, pg100.txt +trot. pg3200.txt, pg100.txt +troth pg3200.txt, pg100.txt +troth! pg100.txt +troth, pg100.txt +troth. pg100.txt +troth." pg100.txt +troth; pg100.txt +trotted pg3200.txt +trouble pg31100.txt, pg3200.txt, pg100.txt +trouble!" pg3200.txt +trouble, pg31100.txt, pg3200.txt, pg100.txt +trouble," pg31100.txt +trouble. pg31100.txt, pg3200.txt, pg100.txt +trouble." pg31100.txt, pg3200.txt +trouble.' pg3200.txt +trouble... pg3200.txt +trouble: pg3200.txt +trouble; pg3200.txt, pg100.txt +trouble? pg100.txt +trouble?" pg3200.txt +trouble?--what pg3200.txt +trouble[4]. 
pg3200.txt +troubled pg3200.txt, pg100.txt +troubled, pg3200.txt +troubled- pg100.txt +troubled. pg3200.txt +troubled; pg3200.txt, pg100.txt +troubles pg3200.txt, pg100.txt +troubles! pg3200.txt +troubles, pg3200.txt, pg100.txt +troubles. pg3200.txt, pg100.txt +troubles." pg3200.txt +troubles; pg3200.txt, pg100.txt +troublesome pg31100.txt, pg3200.txt +troublesome, pg31100.txt, pg3200.txt, pg100.txt +troublesome. pg3200.txt, pg100.txt +troublesome." pg31100.txt +troublesome?' pg3200.txt +troubling pg3200.txt +troublous pg100.txt +trough pg3200.txt, pg100.txt +troughs pg3200.txt +troughs, pg3200.txt +trousers pg3200.txt +trousers, pg3200.txt +trout pg3200.txt, pg100.txt +trout, pg3200.txt +trove," pg3200.txt +trow, pg100.txt +trow- pg100.txt +trow. pg3200.txt, pg100.txt +trow? pg100.txt +trowel. pg100.txt +trowest, pg100.txt +trowsers, pg3200.txt +troy pg3200.txt, pg100.txt +troy! pg100.txt +troy, pg100.txt +troy. pg100.txt +troy; pg100.txt +troy? pg100.txt +troyan pg100.txt +troyan! pg100.txt +troyan, pg100.txt +troyan. pg100.txt +troyans, pg100.txt +troyans. pg100.txt +troyes pg3200.txt +troyes. pg3200.txt +truant, pg100.txt +truant. pg100.txt +truants pg3200.txt +trubble pg3200.txt +truce pg3200.txt, pg100.txt +truce! pg3200.txt +truce!" pg3200.txt +truce, pg3200.txt, pg100.txt +truce. pg3200.txt +truce; pg100.txt +truceless pg3200.txt +truck pg3200.txt +truck. pg3200.txt +truck?" pg3200.txt +truckle-bed; pg100.txt +trudge pg100.txt +trudge. pg100.txt +trudged pg3200.txt +true pg31100.txt, pg3200.txt, pg100.txt +true! pg31100.txt, pg3200.txt, pg100.txt +true!) pg3200.txt +true!- pg100.txt +true, pg31100.txt, pg3200.txt, pg100.txt +true," pg31100.txt +true- pg100.txt +true--" pg3200.txt +true--but pg3200.txt +true--i pg3200.txt +true--surely pg3200.txt +true--true pg3200.txt +true-bred pg100.txt +true-hearted, pg3200.txt +true-love pg100.txt +true. pg31100.txt, pg3200.txt, pg100.txt +true." pg31100.txt, pg3200.txt +true.' pg3200.txt +true: pg3200.txt, pg100.txt +true; pg31100.txt, pg3200.txt, pg100.txt +true? pg3200.txt, pg100.txt +true?" pg31100.txt, pg3200.txt +true?' pg3200.txt +true?--has pg31100.txt +truepenny? pg100.txt +truer pg3200.txt, pg100.txt +truer, pg100.txt +truer-hearted pg100.txt +truest pg3200.txt, pg100.txt +truest; pg100.txt +truffles. pg3200.txt +truisms pg3200.txt +trull pg100.txt +trull, pg100.txt +truly pg31100.txt, pg3200.txt, pg100.txt +truly!" pg31100.txt +truly, pg3200.txt, pg100.txt +truly- pg100.txt +truly. pg3200.txt, pg100.txt +truly." pg3200.txt +truly; pg100.txt +trumbull's pg3200.txt +trump, pg100.txt +trump? pg100.txt +trumpery; pg100.txt +trumpet pg3200.txt, pg100.txt +trumpet, pg3200.txt +trumpet. pg100.txt +trumpet; pg100.txt +trumpet] pg100.txt +trumpeter pg100.txt +trumpeter, pg100.txt +trumpeter; pg100.txt +trumpeters pg3200.txt, pg100.txt +trumpeters, pg100.txt +trumpets pg3200.txt, pg100.txt +trumpets. pg3200.txt, pg100.txt +trumpets] pg100.txt +truncheon; pg100.txt +trundle-tall- pg100.txt +trunk pg31100.txt, pg3200.txt, pg100.txt +trunk's pg3200.txt +trunk, pg3200.txt, pg100.txt +trunk--there pg3200.txt +trunk. pg3200.txt, pg100.txt +trunk: pg3200.txt +trunk; pg100.txt +trunk] pg100.txt +trunks pg3200.txt +trunks, pg3200.txt, pg100.txt +trunks; pg3200.txt +trust pg31100.txt, pg3200.txt, pg100.txt +trust! pg3200.txt, pg100.txt +trust!" pg31100.txt, pg3200.txt +trust's pg3200.txt +trust, pg31100.txt, pg3200.txt, pg100.txt +trust--if pg3200.txt +trust-of pg100.txt +trust. pg3200.txt, pg100.txt +trust." 
pg3200.txt +trust: pg100.txt +trust; pg100.txt +trust? pg3200.txt, pg100.txt +trusted pg31100.txt, pg3200.txt, pg100.txt +trusted) pg31100.txt +trusted, pg31100.txt, pg100.txt +trusted. pg3200.txt +trusted." pg31100.txt +trusted; pg100.txt +trustees pg3200.txt +trustees, pg3200.txt +trustees: pg3200.txt +trustees; pg3200.txt +trustfulness. pg3200.txt +trusting pg31100.txt, pg3200.txt +trusting, pg3200.txt +trusting--so pg3200.txt +trusts pg3200.txt +trustworthiness pg3200.txt +trustworthy pg3200.txt +trustworthy, pg3200.txt +trustworthy. pg3200.txt +trusty pg3200.txt, pg100.txt +truth pg31100.txt, pg3200.txt, pg100.txt +truth! pg100.txt +truth, pg31100.txt, pg3200.txt, pg100.txt +truth- pg100.txt +truth--" pg3200.txt +truth--ain't pg3200.txt +truth--does pg3200.txt +truth--have pg3200.txt +truth-seeker. pg3200.txt +truth-seeking--hunting pg3200.txt +truth. pg31100.txt, pg3200.txt, pg100.txt +truth." pg31100.txt, pg3200.txt +truth.' pg3200.txt +truth: pg100.txt +truth; pg31100.txt, pg3200.txt, pg100.txt +truth? pg100.txt +truth?" pg31100.txt, pg3200.txt +truthful pg3200.txt +truthful, pg3200.txt +truthful?" pg3200.txt +truthfully, pg3200.txt +truthfulness pg3200.txt +truths pg31100.txt, pg3200.txt +truths, pg31100.txt, pg100.txt +truths. pg100.txt +try pg31100.txt, pg3200.txt, pg100.txt +try, pg3200.txt, pg100.txt +try--free pg3200.txt +try. pg3200.txt, pg100.txt +try." pg3200.txt +try: pg3200.txt +try; pg3200.txt, pg100.txt +try? pg3200.txt +try?" pg3200.txt +tryal pg31100.txt +trying pg31100.txt, pg3200.txt +trying, pg3200.txt +trying. pg3200.txt +trying." pg3200.txt +tub pg3200.txt, pg100.txt +tub!' pg3200.txt +tub, pg3200.txt +tub. pg3200.txt, pg100.txt +tubal pg100.txt +tubal. pg100.txt +tubbs pg3200.txt +tube: pg3200.txt +tuberose, pg3200.txt +tubs pg3200.txt +tuck pg3200.txt +tuck! pg100.txt +tucked pg3200.txt +tuckered pg3200.txt +tucket] pg100.txt +tucking pg3200.txt +tudor pg3200.txt +tudors pg3200.txt +tudors. pg3200.txt +tuesday pg31100.txt, pg3200.txt +tuesday, pg31100.txt +tuesday. pg3200.txt +tuesday." pg31100.txt +tuft pg3200.txt +tuft, pg3200.txt +tufted pg3200.txt +tufts pg3200.txt +tug pg3200.txt +tug, pg3200.txt +tug-load pg3200.txt +tugged pg3200.txt +tugging pg3200.txt, pg100.txt +tugs pg3200.txt +tuition pg31100.txt +tuition, pg3200.txt +tuk pg3200.txt +tulips, pg3200.txt +tullus! pg100.txt +tullus, pg100.txt +tumble pg3200.txt, pg100.txt +tumble-bug pg3200.txt +tumble-bug!" pg3200.txt +tumble-bug. pg3200.txt +tumble-bugs pg3200.txt +tumbled pg3200.txt +tumbling pg31100.txt, pg3200.txt +tumbling, pg3200.txt +tumbling-trick? pg100.txt +tumblings pg3200.txt +tumult pg3200.txt +tumultuous pg3200.txt +tumut pg3200.txt +tun pg100.txt +tun'd. pg100.txt +tun, pg3200.txt +tune pg31100.txt, pg3200.txt, pg100.txt +tune! pg3200.txt +tune, pg3200.txt, pg100.txt +tune. pg3200.txt, pg100.txt +tune; pg100.txt +tune? pg100.txt +tuneable pg100.txt +tuned pg3200.txt +tunes pg3200.txt +tunes, pg3200.txt, pg100.txt +tunes. pg100.txt +tungkillo pg3200.txt +tunis, pg100.txt +tunis. pg100.txt +tunis; pg100.txt +tunnel pg3200.txt +tunnel, pg3200.txt +tunnel. pg3200.txt +tunnels pg3200.txt +tunnels, pg3200.txt +turban pg3200.txt +turbaned, pg3200.txt +turbans pg3200.txt +turbulent pg3200.txt +turd. pg100.txt +turf pg31100.txt, pg3200.txt, pg100.txt +turf, pg100.txt +turk pg100.txt +turk! pg100.txt +turk, pg100.txt +turk. pg3200.txt, pg100.txt +turk: pg100.txt +turk?" pg3200.txt +turkey pg3200.txt +turkey-cock. pg100.txt +turkey-cocks. pg100.txt +turkey. 
pg3200.txt +turkeys pg3200.txt +turkeys, pg3200.txt +turkeys; pg3200.txt +turkish pg3200.txt +turks pg3200.txt +turks, pg100.txt +turmoil pg3200.txt +turmoil, pg3200.txt, pg100.txt +turmoils pg3200.txt +turn pg31100.txt, pg3200.txt, pg100.txt +turn! pg100.txt +turn!" pg3200.txt +turn' pg3200.txt +turn'd pg100.txt +turn'd, pg100.txt +turn'd. pg100.txt +turn'd? pg100.txt +turn, pg31100.txt, pg3200.txt, pg100.txt +turn,) pg3200.txt +turn- pg100.txt +turn--i pg3200.txt +turn--it pg3200.txt +turn-out." pg3200.txt +turn. pg3200.txt, pg100.txt +turn; pg100.txt +turn? pg100.txt +turn?" pg31100.txt +turnabout pg3200.txt +turned pg31100.txt, pg3200.txt, pg100.txt +turned! pg100.txt +turned, pg3200.txt, pg100.txt +turned--but pg3200.txt +turned. pg31100.txt, pg3200.txt +turner pg3200.txt +turner's pg31100.txt +turner, pg3200.txt +turner. pg3200.txt +turners pg3200.txt +turnin' pg3200.txt +turning pg31100.txt, pg3200.txt, pg100.txt +turning, pg100.txt +turning-point pg3200.txt +turning. pg100.txt +turning." pg31100.txt +turning; pg100.txt +turnip, pg31100.txt, pg3200.txt +turnip. pg3200.txt +turnip? pg3200.txt +turnips pg3200.txt +turnips--a pg3200.txt +turnips--if pg3200.txt +turnips. pg3200.txt, pg100.txt +turnpike-road. pg31100.txt +turns pg31100.txt, pg3200.txt, pg100.txt +turns, pg3200.txt +turns. pg31100.txt, pg3200.txt, pg100.txt +turns; pg100.txt +turpentine pg3200.txt +turpentine! pg3200.txt +turpentine, pg3200.txt +turpentine; pg3200.txt +turpitude pg100.txt +turret, pg3200.txt +turret-looking pg3200.txt +turret?" pg3200.txt +turreted pg3200.txt +turrets pg3200.txt +turrets, pg3200.txt, pg100.txt +turrible pg3200.txt +turrible--an' pg3200.txt +turtle, pg100.txt +turtle-doves pg100.txt +turtles--they pg3200.txt +tuscany. pg3200.txt +tush! pg100.txt +tushes pg3200.txt +tusked pg3200.txt +tuskeegee pg3200.txt +tusks pg3200.txt +tut! pg100.txt +tutor pg100.txt +tutor'd, pg100.txt +tutor- pg100.txt +tutor. pg100.txt +tutors, pg100.txt +tutorship pg3200.txt +tuttletown. pg3200.txt +tuxedo). pg3200.txt +twaddle pg3200.txt +twaddle, pg3200.txt +twain pg3200.txt, pg100.txt +twain! pg100.txt +twain!" pg3200.txt +twain!' pg3200.txt +twain!-----" pg3200.txt +twain" pg3200.txt +twain' pg3200.txt +twain's pg3200.txt +twain's." pg3200.txt +twain, pg3200.txt, pg100.txt +twain," pg3200.txt +twain- pg100.txt +twain. pg3200.txt, pg100.txt +twain." pg3200.txt +twain."] pg3200.txt +twain.' pg3200.txt +twain.] pg3200.txt +twain: pg3200.txt, pg100.txt +twain; pg100.txt +twain? pg100.txt +twang'd pg100.txt +tway. pg100.txt +twelfth pg3200.txt +twelve pg3200.txt, pg100.txt +twelve, pg31100.txt, pg3200.txt, pg100.txt +twelve-and-thirty-two-pounders, pg3200.txt +twelve. pg3200.txt, pg100.txt +twelve." pg31100.txt +twelve; pg31100.txt, pg3200.txt +twelvemonth pg31100.txt +twelvemonth, pg31100.txt, pg3200.txt +twelvemonth. pg31100.txt +twelvemonth." pg31100.txt +twenties. pg3200.txt +twentieth pg3200.txt +twentieth. pg3200.txt +twenty pg31100.txt, pg3200.txt, pg100.txt +twenty, pg31100.txt, pg3200.txt, pg100.txt +twenty- pg3200.txt +twenty-dollar pg3200.txt +twenty-eight!" pg3200.txt +twenty-fifth pg3200.txt +twenty-five pg3200.txt +twenty-five! pg3200.txt +twenty-five!' pg3200.txt +twenty-five, pg3200.txt +twenty-five. pg3200.txt +twenty-five." pg31100.txt, pg3200.txt +twenty-four pg3200.txt +twenty-four, pg3200.txt +twenty-four. pg31100.txt +twenty-nine pg31100.txt, pg3200.txt, pg100.txt +twenty-nine, pg3200.txt +twenty-nine. 
pg3200.txt +twenty-one, pg3200.txt, pg100.txt +twenty-one--a pg3200.txt +twenty-one--it pg3200.txt +twenty-seven pg31100.txt, pg3200.txt +twenty-seven, pg31100.txt +twenty-seven. pg3200.txt +twenty-six pg3200.txt +twenty-six--twenty-six pg3200.txt +twenty-three pg3200.txt +twenty-three, pg3200.txt +twenty-three. pg3200.txt +twenty-two pg3200.txt +twenty-two, pg3200.txt +twenty-two; pg3200.txt +twenty. pg3200.txt, pg100.txt +twenty." pg3200.txt +twenty; pg100.txt +twenty? pg31100.txt, pg3200.txt +twice pg31100.txt, pg3200.txt, pg100.txt +twice! pg31100.txt +twice) pg3200.txt +twice, pg31100.txt, pg3200.txt +twice-- pg3200.txt +twice. pg31100.txt, pg3200.txt, pg100.txt +twice." pg3200.txt +twice: pg100.txt +twice; pg3200.txt +twice? pg100.txt +twice?" pg31100.txt +twichell pg3200.txt +twichell's, pg3200.txt +twichell, pg3200.txt +twichell. pg3200.txt +twichell.) pg3200.txt +twichell: pg3200.txt +twickenham, pg31100.txt +twig pg3200.txt +twigs pg100.txt +twigs. pg100.txt +twilight pg3200.txt +twilight, pg31100.txt, pg3200.txt +twilight. pg3200.txt +twilights, pg3200.txt +twin pg3200.txt, pg100.txt +twin, pg3200.txt +twin." pg3200.txt +twin?" pg3200.txt +twine pg100.txt +twined pg3200.txt +twinges pg3200.txt +twink. pg100.txt +twinkle pg3200.txt +twinkling pg3200.txt +twinkling, pg3200.txt +twinkling. pg3200.txt, pg100.txt +twinned? pg3200.txt +twins pg3200.txt +twins!" pg3200.txt +twins, pg3200.txt +twins--'" pg3200.txt +twins. pg3200.txt +twins." pg3200.txt +twist pg3200.txt +twist. pg3200.txt +twisted pg31100.txt, pg3200.txt +twisting pg3200.txt +twistings pg3200.txt +twitch pg3200.txt +twitted pg3200.txt +twitter." pg3200.txt +two! pg100.txt +two!" pg3200.txt +two" pg3200.txt +two's pg3200.txt +two, pg31100.txt, pg3200.txt, pg100.txt +two," pg31100.txt +two- pg3200.txt +two--" pg3200.txt +two--and pg3200.txt +two--but pg3200.txt +two--from pg31100.txt +two--he pg3200.txt +two--if pg3200.txt +two--whitelaw pg3200.txt +two-and-a-half pg3200.txt +two-and-twenty pg100.txt +two-carat pg3200.txt +two-fold pg100.txt +two-horse pg3200.txt +two-inch pg3200.txt +two-legg'd pg100.txt +two-line pg3200.txt +two-minute pg3200.txt +two-story pg3200.txt +two-thirds pg3200.txt +two. pg31100.txt, pg3200.txt, pg100.txt +two." pg31100.txt, pg3200.txt +two.] pg3200.txt +two: pg31100.txt, pg3200.txt, pg100.txt +two; pg31100.txt, pg3200.txt, pg100.txt +two? pg3200.txt, pg100.txt +two?" pg31100.txt, pg3200.txt +twos pg3200.txt +tybalt pg100.txt +tybalt. pg100.txt +tybalt; pg100.txt +tybalt? pg100.txt +tying pg31100.txt, pg3200.txt +tymys pg3200.txt +tyndall, pg3200.txt +type pg3200.txt +type, pg3200.txt +type-girl pg3200.txt +type-machine, pg3200.txt +type-machine. pg3200.txt +type-setter. pg3200.txt +type-setting pg3200.txt +type. pg3200.txt +type." pg3200.txt +type; pg3200.txt +types pg3200.txt +typesetter pg3200.txt +typesetting pg3200.txt +typesetting-machine.) pg3200.txt +typewriter pg3200.txt +typewriter, pg3200.txt +typewriter-table pg3200.txt +typewrites pg3200.txt +typewriting pg3200.txt +typewriting, pg3200.txt +typhoid! pg3200.txt +typical pg3200.txt +typography pg3200.txt +tyrannical pg3200.txt +tyrannical, pg3200.txt +tyrannical. pg3200.txt +tyrannical.' pg3200.txt +tyrannical; pg100.txt +tyrannize pg100.txt +tyrannous pg100.txt +tyrannous; pg100.txt +tyranny pg3200.txt, pg100.txt +tyranny, pg31100.txt, pg100.txt +tyranny- pg100.txt +tyranny. pg100.txt +tyranny; pg100.txt +tyrant pg3200.txt, pg100.txt +tyrant! pg100.txt +tyrant, pg100.txt +tyrant. pg100.txt +tyrant." 
pg100.txt +tyrant; pg100.txt +tyrant? pg3200.txt, pg100.txt +tyrants pg3200.txt +tyringham, pg3200.txt +tyrol, pg3200.txt +tyropean pg3200.txt +tyrrel pg100.txt +tyrrel. pg100.txt +tyrrel? pg100.txt +tysis, pg3200.txt +u. pg100.txt +u., pg3200.txt +u.s. pg31100.txt, pg3200.txt, pg100.txt +ubergeschlagen! pg3200.txt +udolpho pg31100.txt +udolpho, pg31100.txt +udolpho?" pg31100.txt +uds; pg100.txt +ugliness pg3200.txt +ugliness. pg3200.txt +ugly pg3200.txt, pg100.txt +ugly, pg3200.txt +ugly. pg31100.txt, pg3200.txt, pg100.txt +uhlic pg3200.txt +uitlanders pg3200.txt +ulcer! pg100.txt +ulcerations, pg3200.txt +ulfius?" pg3200.txt +ulrich pg3200.txt +ulrich?" pg3200.txt +ulsters pg3200.txt +ulterior pg3200.txt +ultimate pg3200.txt +ultimately pg3200.txt +ulysses pg3200.txt, pg100.txt +ulysses! pg100.txt +ulysses, pg100.txt +ulysses. pg100.txt +ulysses? pg100.txt +um pg3200.txt +um, pg3200.txt +um--welches? pg3200.txt +um. pg100.txt +um." pg3200.txt +umbra pg100.txt +umbrella pg3200.txt +umbrella!" pg31100.txt, pg3200.txt +umbrella, pg3200.txt +umbrella--" pg31100.txt +umbrella. pg3200.txt +umbrella; pg3200.txt +umbrellas pg31100.txt, pg3200.txt +umbrellas, pg3200.txt +umbrellas; pg3200.txt +ummed pg3200.txt +umpire--clarence. pg3200.txt +umpires, pg3200.txt +umsteigen! pg3200.txt +umsteigen. pg3200.txt +umsteigen? pg3200.txt +umsteignen! pg3200.txt +un pg3200.txt +un- pg3200.txt +un--" pg3200.txt +un-feeling; pg100.txt +unabashed pg3200.txt +unabhaengigkeitserklaerungen. pg3200.txt +unable pg31100.txt, pg3200.txt +unable. pg100.txt +unabridged pg3200.txt +unaccomplished--" pg3200.txt +unaccountable pg31100.txt, pg3200.txt +unaccountable!" pg31100.txt +unaccountable--so pg3200.txt +unaccountable. pg3200.txt +unaccountably pg3200.txt +unaccustomed pg3200.txt +unacquainted pg31100.txt, pg3200.txt +unacquainted- pg100.txt +unactive, pg100.txt +unadorned pg31100.txt +unadvis'd pg100.txt +unadvis'd, pg100.txt +unadvised, pg3200.txt +unaffected pg31100.txt, pg3200.txt +unaffected, pg31100.txt, pg3200.txt +unafraid pg3200.txt +unallied pg31100.txt +unalloyed, pg31100.txt +unalloyed. pg31100.txt +unalterable pg3200.txt +unalterable; pg31100.txt +unanel'd, pg100.txt +unanimity. pg3200.txt +unanimous pg3200.txt +unannoying pg3200.txt +unanswerable pg31100.txt +unanswerable. pg31100.txt +unanswerable: pg3200.txt +unappeas'd, pg100.txt +unappeasable pg3200.txt +unappeasable. pg3200.txt +unappeased pg3200.txt +unapproachable pg3200.txt +unapproachable, pg3200.txt +unapt pg100.txt +unarm'd pg100.txt +unarm'd, pg100.txt +unarmed pg3200.txt +unarmed. pg3200.txt +unarticulated, pg3200.txt +unasked, pg3200.txt +unasked. pg3200.txt +unassailable. pg3200.txt +unassailably pg3200.txt +unassisted." pg3200.txt +unattended pg31100.txt +unattended!" pg31100.txt +unattractive, pg3200.txt +unavailing pg3200.txt +unavailing, pg31100.txt +unavailing. pg31100.txt +unavenged. pg3200.txt +unavoidable pg3200.txt +unavoidable. pg31100.txt +unavoidable." pg31100.txt +unavoidably pg3200.txt +unaware pg3200.txt +unaware! pg3200.txt +unaware. pg3200.txt +unaware; pg31100.txt +unawares pg3200.txt +unawares, pg3200.txt, pg100.txt +unawares. pg31100.txt, pg3200.txt, pg100.txt +unawares; pg100.txt +unbak'd pg100.txt +unbanded, pg100.txt +unbearable. pg3200.txt +unbearably pg3200.txt +unbecoming pg3200.txt +unbecoming. pg31100.txt, pg100.txt +unbegot, pg100.txt +unbeknowns pg3200.txt +unbelief pg3200.txt +unbelief. pg3200.txt +unbelievable--but pg3200.txt +unbelievable. pg3200.txt +unbelievers pg3200.txt +unbend. 
pg3200.txt +unbending, pg31100.txt +unblemished pg3200.txt +unblest, pg100.txt +unblushingly pg3200.txt +unborn pg100.txt +unbosom pg100.txt +unbottoned--" pg3200.txt +unbound. pg100.txt +unbounded pg3200.txt +unbox pg3200.txt +unbrac'd, pg100.txt +unbred, pg100.txt +unbreech'd, pg100.txt +unbroken pg31100.txt, pg3200.txt +unbroken, pg31100.txt +unbruis'd, pg100.txt +unbuckle. pg100.txt +unburnt pg100.txt +unbutton pg100.txt +uncalculated pg3200.txt +uncanny pg3200.txt +uncanny, pg3200.txt +uncanny. pg3200.txt +uncapable pg3200.txt +uncape. pg100.txt +uncatchable pg3200.txt +uncaught; pg100.txt +uncaught? pg100.txt +unceasing pg31100.txt, pg3200.txt +unceasingly pg31100.txt, pg3200.txt +unceasingly. pg3200.txt +uncensured pg31100.txt +uncertain pg31100.txt, pg3200.txt +uncertain, pg31100.txt, pg3200.txt, pg100.txt +uncertain. pg3200.txt +uncertain." pg3200.txt +uncertain? pg3200.txt +uncertainties pg3200.txt +uncertainties. pg3200.txt +uncertainty pg31100.txt, pg3200.txt +uncertainty! pg100.txt +uncertainty, pg3200.txt, pg100.txt +uncertainty. pg3200.txt, pg100.txt +uncertainty." pg3200.txt +unchain pg3200.txt +unchallenged. pg3200.txt +unchanged pg3200.txt +unchanged, pg3200.txt +unchanging pg3200.txt +unchanging, pg100.txt +uncharacteristic pg3200.txt +uncharitable pg3200.txt +uncharted pg3200.txt +uncheerful. pg31100.txt +uncivil pg100.txt +uncivil? pg31100.txt +uncivilized pg3200.txt +unclasp'd pg100.txt +unclasping pg3200.txt +unclassifiable pg3200.txt +uncle pg31100.txt, pg3200.txt, pg100.txt +uncle! pg100.txt +uncle!" pg31100.txt +uncle!- pg100.txt +uncle's pg31100.txt, pg3200.txt +uncle's, pg3200.txt +uncle's. pg31100.txt, pg100.txt +uncle's; pg31100.txt +uncle, pg31100.txt, pg3200.txt, pg100.txt +uncle," pg31100.txt +uncle- pg100.txt +uncle-father pg100.txt +uncle. pg31100.txt, pg3200.txt, pg100.txt +uncle." pg31100.txt, pg3200.txt +uncle.' pg3200.txt +uncle; pg100.txt +uncle? pg3200.txt, pg100.txt +uncle?" pg31100.txt, pg3200.txt +unclean. pg100.txt +uncleanness pg100.txt +uncleanness." pg3200.txt +uncles pg3200.txt +uncles! pg100.txt +uncles. pg31100.txt +uncles? pg100.txt +unclodded. pg3200.txt +unclouded, pg3200.txt +uncolted. pg100.txt +uncombed pg3200.txt +uncomely pg3200.txt +uncomfortable pg31100.txt, pg3200.txt +uncomfortable, pg31100.txt, pg3200.txt +uncomfortable. pg3200.txt +uncomfortable." pg31100.txt, pg3200.txt +uncomfortable; pg3200.txt +uncomfortably pg31100.txt, pg3200.txt +uncomfortably: pg3200.txt +uncommercial pg3200.txt +uncommon pg31100.txt, pg3200.txt +uncommon, pg31100.txt +uncommon--now pg31100.txt +uncommon. pg31100.txt +uncommon." pg31100.txt +uncommonly pg31100.txt, pg3200.txt +uncomplaining, pg3200.txt +uncompleted pg3200.txt +uncompromising pg3200.txt +uncompromisingly pg3200.txt +unconcern pg31100.txt +unconcerned pg31100.txt +unconcerned, pg31100.txt +unconditional pg3200.txt +unconfessed." pg3200.txt +unconfined pg3200.txt +unconfused pg3200.txt +unconnected pg31100.txt +unconnected; pg31100.txt +unconquerable pg31100.txt +unconquerable. pg3200.txt +unconscious pg3200.txt +unconscious, pg3200.txt +unconscious--walks pg3200.txt +unconscious. pg3200.txt +unconsciously pg31100.txt, pg3200.txt +unconsciously, pg31100.txt, pg3200.txt +unconsciously. pg3200.txt +unconsciously; pg3200.txt +unconsciousness pg3200.txt +unconsciousness, pg3200.txt +unconsciousness; pg3200.txt +unconsciousnesses pg3200.txt +unconstant. pg100.txt +unconstitutional. 
pg3200.txt +unconstrain'd, pg100.txt +uncontrollable pg3200.txt +uncontrollable, pg3200.txt +unconventional pg3200.txt +unconventional. pg3200.txt +uncorking--jokes pg3200.txt +uncouth pg100.txt +uncouth; pg3200.txt +uncouthness pg31100.txt +uncover pg3200.txt +uncovered pg3200.txt, pg100.txt +uncovered, pg3200.txt +uncovered. pg3200.txt +uncovered; pg3200.txt +uncovering pg3200.txt +uncreated pg3200.txt +uncredited pg3200.txt +uncrowned pg3200.txt +unction, pg3200.txt +uncultivated pg3200.txt +uncurable; pg100.txt +uncurls pg100.txt +uncurrent pg100.txt +uncurtained pg3200.txt +uncurtained; pg3200.txt +und pg3200.txt +undecided pg3200.txt +undecided. pg3200.txt +undecorated pg3200.txt +undefended. pg3200.txt +undefined pg3200.txt +undeniable pg3200.txt +under pg31100.txt, pg3200.txt, pg100.txt +under, pg31100.txt, pg3200.txt +under-bear. pg100.txt +under-description; pg3200.txt +under-servants pg31100.txt +under-study pg3200.txt +under. pg31100.txt, pg3200.txt +under; pg3200.txt +under? pg100.txt +underborne pg100.txt +underbrush, pg3200.txt +underclothes pg3200.txt +underfoot pg3200.txt +underfoot, pg3200.txt +underfoot. pg100.txt +undergo pg31100.txt, pg3200.txt, pg100.txt +undergo, pg100.txt +undergo- pg100.txt +undergo. pg3200.txt +undergoes, pg100.txt +undergone pg31100.txt +undergone, pg31100.txt +underground pg3200.txt, pg100.txt +underground. pg3200.txt +underhanded pg3200.txt +underlings. pg100.txt +underlying pg3200.txt +undermine pg3200.txt +undermined pg3200.txt +undermining pg3200.txt +underneath pg3200.txt +underneath. pg3200.txt +underrates pg3200.txt +underside pg3200.txt +undersigned pg3200.txt +undersigned. pg3200.txt +understan'. pg3200.txt +understan'?" pg3200.txt +understand pg31100.txt, pg3200.txt, pg100.txt +understand'st. pg100.txt +understand, pg31100.txt, pg3200.txt, pg100.txt +understand----" pg3200.txt +understand--and pg3200.txt +understand--thus: pg3200.txt +understand--would pg3200.txt +understand. pg31100.txt, pg3200.txt, pg100.txt +understand." pg31100.txt, pg3200.txt +understand.--we pg31100.txt +understand.] pg3200.txt +understand: pg3200.txt, pg100.txt +understand; pg3200.txt +understand? pg3200.txt +understand?" pg31100.txt, pg3200.txt +understand?' pg3200.txt +understand?--i pg3200.txt +understandable pg3200.txt +understandable, pg3200.txt +understanding pg31100.txt, pg3200.txt, pg100.txt +understanding, pg31100.txt, pg3200.txt, pg100.txt +understanding--i pg31100.txt +understanding--though pg31100.txt +understanding. pg31100.txt, pg3200.txt +understanding." pg31100.txt, pg3200.txt +understanding; pg31100.txt +understandingly.' pg3200.txt +understandings pg100.txt +understands pg31100.txt, pg3200.txt +understands. pg3200.txt +understands." pg3200.txt +understood pg31100.txt, pg3200.txt, pg100.txt +understood, pg31100.txt +understood--then: pg3200.txt +understood. pg31100.txt, pg3200.txt, pg100.txt +understood." pg31100.txt +understood? pg31100.txt +undertake pg31100.txt, pg3200.txt, pg100.txt +undertake't; pg100.txt +undertake, pg100.txt +undertake. pg3200.txt +undertake?" pg3200.txt +undertaken pg31100.txt, pg3200.txt +undertaken, pg3200.txt +undertaker pg3200.txt +undertaker!--out pg3200.txt +undertaker's pg3200.txt +undertaker, pg3200.txt +undertaker-eye pg3200.txt +undertaker-furniture pg3200.txt +undertaker. pg3200.txt +undertaker." pg3200.txt +undertaker.' pg3200.txt +undertaker: pg3200.txt +undertaker?" 
pg3200.txt +undertakers; pg3200.txt +undertakes pg31100.txt, pg100.txt +undertakes, pg3200.txt +undertaking pg31100.txt, pg3200.txt, pg100.txt +undertaking, pg31100.txt, pg3200.txt +undertaking. pg3200.txt +undertakings pg100.txt +undertone pg3200.txt +undertone. pg3200.txt +undertook pg31100.txt, pg3200.txt, pg100.txt +undertook. pg3200.txt, pg100.txt +undertow pg3200.txt +undervalu'd pg100.txt +undervalued pg31100.txt +undervoice, pg31100.txt +underwent pg3200.txt +underwent? pg100.txt +underwrit, pg100.txt +underwriters pg3200.txt +underwriters! pg3200.txt +undescribable. pg31100.txt +undescried. pg100.txt +undeserved pg3200.txt +undeserved." pg31100.txt +undeserver, pg100.txt +undeservers. pg100.txt +undeserving pg3200.txt +undesigned. pg31100.txt +undesirable pg31100.txt +undesirable. pg3200.txt +undeveloped pg3200.txt +undigested--and pg3200.txt +undignified pg3200.txt +undiminished pg3200.txt +undiminished; pg3200.txt +undimmed, pg3200.txt +undinted. pg100.txt +undiscernible, pg100.txt +undiscovered pg31100.txt, pg3200.txt +undishonoured. pg100.txt +undismayed: pg3200.txt +undispos'd. pg100.txt +undistinguishable, pg100.txt +undistinguishable. pg100.txt +undisturbed pg3200.txt +undisturbed. pg3200.txt +undisturbed; pg3200.txt +undisturbed? pg3200.txt +undivided pg3200.txt +undo pg31100.txt, pg3200.txt, pg100.txt +undo, pg3200.txt +undo. pg100.txt +undo? pg100.txt +undoing pg31100.txt, pg3200.txt, pg100.txt +undoing, pg3200.txt +undoing." pg3200.txt +undone pg31100.txt, pg100.txt +undone! pg100.txt +undone. pg3200.txt, pg100.txt +undoubted pg31100.txt +undoubtedly pg31100.txt, pg3200.txt, pg100.txt +undoubtedly, pg31100.txt +undoubtedly. pg3200.txt +undoubtedly." pg31100.txt +undoubtedly; pg31100.txt +undress pg3200.txt +undressed pg3200.txt +undrinkable pg3200.txt +undrown'd pg100.txt +undrown'd. pg100.txt +undue pg31100.txt +undutiful. pg100.txt +une pg3200.txt +unearned pg3200.txt +unearthed pg3200.txt +unearthed. pg3200.txt +unearthly, pg100.txt +uneasily. pg3200.txt +uneasily: pg3200.txt +uneasines pg100.txt +uneasiness pg31100.txt, pg3200.txt +uneasiness, pg31100.txt, pg3200.txt +uneasiness. pg31100.txt, pg3200.txt +uneasiness." pg3200.txt +uneasiness.--i pg31100.txt +uneasiness; pg31100.txt, pg3200.txt +uneasy pg31100.txt, pg3200.txt +uneasy, pg3200.txt +uneasy. pg31100.txt, pg3200.txt +uneasy." pg31100.txt, pg3200.txt +uneasy.--she pg31100.txt +uneasy; pg31100.txt +uneasy? pg3200.txt +uneducated pg3200.txt +uneducated, pg100.txt +unelected. pg100.txt +unembarrassed pg3200.txt +unembarrassed; pg3200.txt +unembarrassing, pg3200.txt +unembellished pg3200.txt +unemployed. pg3200.txt +unencumbered pg3200.txt +unendurable pg3200.txt +unendurable. pg3200.txt +unendurable; pg3200.txt +unenforceability pg31100.txt, pg3200.txt, pg100.txt +unengaged pg31100.txt +unequal pg31100.txt, pg3200.txt +unerringly, pg3200.txt +unertragliche pg3200.txt +uneven, pg100.txt +uneventful pg31100.txt +unexamined pg3200.txt +unexceptionable, pg31100.txt +unexceptionable. pg31100.txt +unexceptionable; pg31100.txt +unexcited pg3200.txt +unexecuted pg100.txt +unexpectantly pg3200.txt +unexpected pg31100.txt, pg3200.txt +unexpected, pg31100.txt, pg100.txt +unexpected. pg31100.txt, pg3200.txt +unexpected.' 
pg3200.txt +unexpected: pg3200.txt +unexpectedly pg31100.txt, pg3200.txt +unexpectedly, pg3200.txt +unexpectedness; pg3200.txt +unexperienc'd pg100.txt +unexplained pg3200.txt +unexplored pg3200.txt +unfading, pg3200.txt +unfailing pg3200.txt +unfailingly pg3200.txt +unfair pg3200.txt +unfairly pg3200.txt +unfairly, pg3200.txt +unfairly. pg31100.txt +unfaithful; pg3200.txt +unfallible. pg100.txt +unfam'd pg100.txt +unfamiliar pg3200.txt +unfantastic? pg3200.txt +unfashionable pg31100.txt, pg100.txt +unfastened pg31100.txt +unfathered, pg100.txt +unfatigued. pg3200.txt +unfavourable pg31100.txt +unfavourable, pg31100.txt +unfeeling pg31100.txt, pg3200.txt +unfeelingly. pg31100.txt +unfeignedly. pg100.txt +unfellowed. pg100.txt +unfelt pg31100.txt +unfertile pg3200.txt +unfinished pg31100.txt, pg3200.txt +unfinished, pg31100.txt, pg3200.txt +unfinished. pg3200.txt +unfired pg3200.txt +unfirm, pg100.txt +unfit pg31100.txt, pg3200.txt +unfit. pg100.txt +unfitness pg3200.txt +unfitness, pg31100.txt +unfitness- pg100.txt +unfitted pg3200.txt +unfitted. pg3200.txt +unfixed pg31100.txt +unfledg'd, pg100.txt +unflinching, pg3200.txt +unfold pg31100.txt, pg3200.txt, pg100.txt +unfold, pg3200.txt, pg100.txt +unfold. pg100.txt +unfold: pg100.txt +unfolded pg100.txt +unfolded." pg31100.txt +unfolded; pg31100.txt +unfolds. pg100.txt +unforfeited! pg100.txt +unforgotten pg3200.txt +unfortunate pg31100.txt, pg3200.txt +unfortunate, pg31100.txt, pg3200.txt +unfortunate--most pg31100.txt +unfortunate. pg31100.txt, pg3200.txt, pg100.txt +unfortunate; pg100.txt +unfortunately pg31100.txt, pg3200.txt +unfortunately, pg31100.txt, pg3200.txt +unfounded; pg31100.txt +unfrequently. pg31100.txt +unfriendly pg31100.txt +unfulfilled. pg3200.txt +unfurnished--from pg31100.txt +ungallant pg3200.txt +ungarter'd! pg100.txt +ungeneral. pg3200.txt +ungenerous, pg31100.txt +ungenerous. pg31100.txt, pg3200.txt +ungenteel pg31100.txt +ungentle. pg100.txt +ungentleness pg100.txt +ungently? pg100.txt +ungenuine pg3200.txt +ungermanic pg3200.txt +ungird pg100.txt +ungot. pg100.txt +ungovern'd; pg100.txt +ungovernable pg31100.txt +ungracious pg31100.txt +ungracious, pg31100.txt +ungracious. pg31100.txt +ungraciousness, pg31100.txt +ungrammatical, pg3200.txt +ungraspable pg3200.txt +ungrateful pg3200.txt, pg100.txt +ungrateful!" pg31100.txt +ungrateful, pg31100.txt +ungrateful." pg31100.txt +ungratefulness pg3200.txt +unguarded pg31100.txt, pg3200.txt +unguem. pg100.txt +unhampered pg3200.txt +unhandled, pg100.txt +unhandsomest pg3200.txt +unhanged pg3200.txt +unhappier pg3200.txt +unhappily. pg100.txt +unhappily: pg100.txt +unhappiness pg31100.txt, pg100.txt +unhappiness! pg100.txt +unhappiness, pg31100.txt +unhappiness. pg31100.txt, pg3200.txt +unhappy pg31100.txt, pg3200.txt +unhappy! pg31100.txt, pg100.txt +unhappy, pg31100.txt, pg3200.txt +unhappy. pg31100.txt, pg3200.txt, pg100.txt +unhappy." pg31100.txt +unhappy."-- pg31100.txt +unhappy: pg100.txt +unhappy; pg31100.txt, pg3200.txt, pg100.txt +unhappy? pg3200.txt +unhappy?" pg31100.txt +unharm'd. pg100.txt +unharmed! pg3200.txt +unhealthy pg3200.txt +unheard pg31100.txt, pg3200.txt +unheedfully. pg100.txt +unheroic pg3200.txt +unhesitatingly pg3200.txt +unhesitatingly. pg3200.txt +unholy, pg3200.txt +unhonored pg3200.txt +unhuman pg3200.txt +unhurt. pg3200.txt +unicorn, pg100.txt +uniform pg3200.txt +uniform, pg3200.txt +uniform,) pg3200.txt +uniform--[policeman] pg3200.txt +uniform. 
pg31100.txt +uniform; pg3200.txt +uniformed pg3200.txt +uniformity pg3200.txt +uniformly pg31100.txt +uniformly, pg3200.txt +uniforms pg3200.txt +uniforms, pg3200.txt +uniforms; pg3200.txt +unimaginable pg3200.txt +unimaginable. pg3200.txt +unimaginative, pg3200.txt +unimpaired pg31100.txt +unimpaired, pg3200.txt +unimpaired. pg3200.txt +unimpeachable pg3200.txt +unimpeachable, pg3200.txt +unimportant pg3200.txt +unimportant. pg3200.txt +unimpressive. pg3200.txt +uninfluenced pg3200.txt +uninformed pg31100.txt +uninhabited pg3200.txt +uninjured. pg3200.txt +uninspired pg3200.txt +uninstructed. pg3200.txt +unintelligent pg3200.txt +unintelligibility pg3200.txt +unintelligible pg3200.txt +unintelligible." pg31100.txt +unintentional pg3200.txt +unintentional. pg31100.txt +uninteresting pg3200.txt +uninteresting, pg31100.txt +unintricate, pg31100.txt +uninvited pg31100.txt +uninvited, pg3200.txt +uninvited--he pg3200.txt +uninvited. pg3200.txt +uninviting pg3200.txt +union pg31100.txt, pg3200.txt +union, pg31100.txt, pg3200.txt +union--men pg3200.txt +union. pg31100.txt, pg3200.txt +union.' pg3200.txt +union.) pg3200.txt +unionist pg3200.txt +unionville. pg3200.txt +unique pg3200.txt +unique" pg3200.txt +unique. pg3200.txt +unison pg31100.txt +unison, pg3200.txt +unison--recite!" pg3200.txt +unison: pg3200.txt +unissued, pg3200.txt +unit pg3200.txt +unitarians, pg3200.txt +unite pg31100.txt, pg100.txt +united pg31100.txt, pg3200.txt, pg100.txt +united, pg100.txt +united." pg31100.txt +unities, pg3200.txt +unity pg3200.txt +unity, pg100.txt +unity. pg100.txt +universal pg31100.txt, pg3200.txt, pg100.txt +universal, pg3200.txt +universal. pg31100.txt, pg3200.txt, pg100.txt +universal; pg3200.txt +universalists, pg3200.txt +universally pg31100.txt, pg3200.txt +universe pg3200.txt +universe! pg3200.txt +universe, pg3200.txt +universe. pg3200.txt, pg100.txt +universe." pg3200.txt +universe: pg3200.txt +universities, pg31100.txt +universities. pg100.txt +university pg3200.txt +university's pg3200.txt +university, pg3200.txt +university. pg3200.txt, pg100.txt +university; pg3200.txt +unjust pg31100.txt, pg3200.txt, pg100.txt +unjust! pg100.txt +unjust, pg3200.txt, pg100.txt +unjust. pg3200.txt +unjust." pg31100.txt +unjust; pg100.txt +unjust? pg100.txt +unjustifiable pg31100.txt +unjustly pg31100.txt +unjustly. pg3200.txt, pg100.txt +unkind pg31100.txt, pg3200.txt, pg100.txt +unkind, pg31100.txt, pg3200.txt +unkind. pg31100.txt, pg100.txt +unkind; pg31100.txt, pg100.txt +unkind? pg100.txt +unkindly pg100.txt +unkindly. pg31100.txt +unkindness pg31100.txt, pg3200.txt, pg100.txt +unkindness, pg31100.txt, pg100.txt +unkindness. pg31100.txt, pg100.txt +unkindness; pg100.txt +unkindnesses pg3200.txt +unkinglike. pg100.txt +unkiss'd. pg100.txt +unknit pg100.txt +unknowingly pg3200.txt +unknown pg31100.txt, pg3200.txt, pg100.txt +unknown! pg100.txt +unknown, pg3200.txt, pg100.txt +unknown. pg3200.txt, pg100.txt +unknown." pg31100.txt +unknown.... pg3200.txt +unknown: pg3200.txt +unknown; pg100.txt +unknowns, pg3200.txt +unlasting pg3200.txt +unlawful pg3200.txt +unlawful. pg100.txt +unlawfully? pg100.txt +unlearn pg3200.txt +unlearned, pg100.txt +unleashed pg3200.txt +unless pg31100.txt, pg3200.txt, pg100.txt +unless, pg3200.txt +unlettered pg3200.txt +unlettered, pg3200.txt +unlicensed pg3200.txt +unlighted pg3200.txt +unlike pg31100.txt, pg3200.txt, pg100.txt +unlike. pg31100.txt +unlikely pg31100.txt, pg3200.txt, pg100.txt +unlikely. pg100.txt +unlikely." 
pg31100.txt +unload pg3200.txt +unloaded pg3200.txt +unloads pg3200.txt +unlocatable pg3200.txt +unlock pg100.txt +unlocked pg3200.txt +unlocked. pg3200.txt +unlocking pg3200.txt +unlocks pg3200.txt +unlooked pg31100.txt +unloose pg3200.txt +unloose, pg100.txt +unlov'd? pg100.txt +unlovable pg3200.txt +unlovely; pg31100.txt +unluckily pg31100.txt, pg100.txt +unluckily, pg31100.txt, pg3200.txt +unlucky pg31100.txt, pg3200.txt +unlucky." pg31100.txt +unmailed pg3200.txt +unmajestic pg31100.txt +unmanageable pg31100.txt +unmanned pg3200.txt +unmannerly pg100.txt +unmannerly, pg100.txt +unmannerly. pg100.txt +unmannerly; pg100.txt +unmarred pg3200.txt +unmarred; pg3200.txt +unmarried pg3200.txt +unmarried." pg31100.txt +unmasculine. pg3200.txt +unmask pg100.txt +unmask. pg100.txt +unmasked; pg3200.txt +unmatchable, pg3200.txt, pg100.txt +unmeaning pg3200.txt +unmeet, pg100.txt +unmeet: pg100.txt +unmentioned, pg3200.txt +unmentioned. pg31100.txt +unmercifully pg3200.txt +unmercifully." pg31100.txt +unmerited.'" pg31100.txt +unmeriting, pg100.txt +unmingled pg3200.txt +unmingled. pg100.txt +unmiraculous pg3200.txt +unmitigated pg100.txt +unmixed pg31100.txt +unmoan'd; pg100.txt +unmoglich, pg3200.txt +unmolested pg3200.txt +unmolested, pg3200.txt +unmolested. pg3200.txt +unmoved, pg31100.txt +unmutilated. pg3200.txt +unnatural pg3200.txt, pg100.txt +unnatural! pg100.txt +unnatural, pg100.txt +unnatural. pg31100.txt, pg100.txt +unnatural; pg100.txt +unnecessarily pg3200.txt, pg100.txt +unnecessarily. pg3200.txt +unnecessary pg31100.txt, pg3200.txt +unnecessary, pg3200.txt +unnecessary. pg31100.txt, pg3200.txt +unnecessary." pg3200.txt +unnecessary; pg31100.txt, pg3200.txt +unnecessary? pg31100.txt +unnecessity pg3200.txt +unneighbourly! pg100.txt +unnerved, pg3200.txt +unnoted pg100.txt +unnoticed pg3200.txt +unnoticed, pg3200.txt +unnoticed. pg31100.txt, pg3200.txt +unobjectionable pg3200.txt +unobservant pg31100.txt, pg3200.txt +unobserved. pg31100.txt +unobstructed pg3200.txt +unobstructed. pg3200.txt +unoccupied, pg3200.txt +unoccupied. pg3200.txt +unoffending pg31100.txt, pg3200.txt +unofficial pg3200.txt +unornamental, pg3200.txt +unornamented pg3200.txt +unostentatiously pg3200.txt +unostentatiously, pg3200.txt +unpaid pg100.txt +unpainted pg3200.txt +unpalatable. pg3200.txt +unparagon'd, pg100.txt +unparallel'd. pg100.txt +unpardonable pg31100.txt +unpardonable, pg31100.txt +unpardonable. pg31100.txt +unpeople pg100.txt +unperceived, pg31100.txt +unpersuadable; pg31100.txt +unpicturesque pg3200.txt +unpleasant pg31100.txt, pg3200.txt +unpleasant, pg31100.txt, pg3200.txt +unpleasant. pg31100.txt, pg3200.txt +unpleasant; pg31100.txt +unpleasantest pg31100.txt +unpleasantly, pg3200.txt +unpleasantly." pg31100.txt +unpleasantness pg31100.txt +unpolicied! pg100.txt +unpopular pg3200.txt +unpopular. pg3200.txt +unpopularity pg3200.txt +unpossess'd? pg100.txt +unpossible. pg100.txt +unpracticed pg3200.txt +unpractis'd; pg100.txt +unpractised pg3200.txt +unprecedented! pg3200.txt +unpregnant pg100.txt +unprejudiced pg3200.txt +unprejudiced. pg31100.txt +unpremeditated. pg100.txt +unprepared pg31100.txt, pg3200.txt +unprepared, pg3200.txt, pg100.txt +unprepared. pg3200.txt +unprepared." pg3200.txt +unprepared; pg31100.txt +unpretending pg31100.txt, pg3200.txt +unpretentious; pg3200.txt +unpretty; pg31100.txt +unprintable, pg3200.txt +unprizable, pg100.txt +unproductive, pg3200.txt +unproductive. 
pg3200.txt +unprofitable pg100.txt +unprogressive, pg3200.txt +unprompted, pg31100.txt +unproperly pg100.txt +unprosperous, pg3200.txt +unprotected pg31100.txt, pg3200.txt +unprotected. pg3200.txt +unprovided pg31100.txt +unprovided. pg100.txt +unprovided? pg100.txt +unprovident. pg100.txt +unpublished). pg3200.txt +unpublished. pg3200.txt +unqualified pg3200.txt +unqualifiedly. pg3200.txt +unquestion'd pg100.txt +unquestionable. pg3200.txt +unquestionably pg3200.txt +unquiet pg100.txt +unquietly. pg100.txt +unquietness. pg100.txt +unquietness? pg100.txt +unraveled. pg3200.txt +unravelled. pg31100.txt +unreached. pg3200.txt +unread, pg31100.txt, pg100.txt +unread. pg3200.txt +unread: pg3200.txt +unreadable; pg3200.txt +unready pg100.txt +unreal, pg3200.txt +unreality. pg3200.txt +unrealizable pg3200.txt +unrealizable, pg3200.txt +unrealizable. pg3200.txt +unreasonable pg31100.txt, pg3200.txt +unreasonable, pg3200.txt, pg100.txt +unreasonable. pg31100.txt, pg3200.txt +unreasonable; pg31100.txt +unreasonably pg31100.txt +unreasoning pg3200.txt +unrebuked, pg3200.txt +unrebuked. pg3200.txt +unreceipted pg3200.txt +unrecorded, pg3200.txt +unreflecting pg3200.txt +unrefreshed. pg3200.txt +unregular. pg3200.txt +unrelated pg3200.txt +unrelated, pg3200.txt +unreliable--enterprise. pg3200.txt +unreliable. pg3200.txt +unrelieved pg3200.txt +unremovably pg100.txt +unrented. pg3200.txt +unrepentant. pg3200.txt +unrepresented--and pg3200.txt +unreproached. pg3200.txt +unreproved pg3200.txt +unreserve pg31100.txt +unreserve; pg31100.txt +unreserved pg31100.txt, pg3200.txt +unrespected, pg100.txt +unresponsive pg3200.txt +unrest pg100.txt +unrest, pg100.txt +unrest. pg3200.txt, pg100.txt +unrest; pg100.txt +unrestfully, pg3200.txt +unrestrained pg31100.txt +unrevealed pg3200.txt +unrevealed. pg3200.txt +unrewarded, pg3200.txt +unright, pg3200.txt +unrighteous." pg3200.txt +unroll pg100.txt +unroll'd, pg100.txt +unromantic pg3200.txt +unroosted pg100.txt +unruffle pg3200.txt +unsacrilegious pg3200.txt +unsaddled pg3200.txt +unsafe pg31100.txt, pg3200.txt, pg100.txt +unsaid pg31100.txt +unsaid. pg3200.txt +unsaid; pg31100.txt +unsalable. pg3200.txt +unsanctified pg100.txt +unsatisfactory pg3200.txt +unsatisfactory. pg3200.txt +unsatisfied pg3200.txt +unsatisfied, pg3200.txt, pg100.txt +unsatisfied. pg100.txt +unsatisfied; pg3200.txt +unsatisfied? pg100.txt +unsatisfying pg3200.txt +unsay. pg100.txt +unschool'd; pg100.txt +unscorch'd. pg100.txt +unseal pg100.txt +unseal'd- pg100.txt +unsealed pg3200.txt +unsearch'd. pg100.txt +unsearched, pg31100.txt +unseasonable, pg31100.txt +unseasonable. pg100.txt +unseasonableness pg31100.txt +unseated pg3200.txt +unseemly pg3200.txt +unseen pg31100.txt, pg3200.txt, pg100.txt +unseen, pg31100.txt, pg3200.txt, pg100.txt +unseen. pg31100.txt, pg100.txt +unselfish pg3200.txt +unselfish, pg3200.txt +unselfishness. pg3200.txt +unsent pg3200.txt +unsentimental pg3200.txt +unsentimentality pg3200.txt +unserviceable. pg100.txt +unset, pg100.txt +unsettle. pg100.txt +unsettled pg31100.txt +unsettled, pg100.txt +unsettled. pg3200.txt, pg100.txt +unsettledness, pg3200.txt +unsettling. pg3200.txt +unsex pg100.txt +unshak'd pg100.txt +unshaken!! pg3200.txt +unshaken, pg31100.txt +unsheath'd, pg100.txt +unshod. pg3200.txt +unshown pg100.txt +unshunn'd pg100.txt +unsightly pg3200.txt +unsinew'd, pg100.txt +unskilful pg100.txt +unskillful pg100.txt +unslung pg3200.txt +unsmiling pg3200.txt +unsmiling; pg3200.txt +unsmirched pg3200.txt +unsmirched. 
pg3200.txt +unsocial, pg31100.txt +unsolicited pg100.txt +unsolicited. pg100.txt +unsonneted, pg3200.txt +unsophisticated, pg3200.txt +unsought pg100.txt +unsound pg3200.txt, pg100.txt +unspeakable pg3200.txt +unspeakable. pg3200.txt +unspeakable; pg100.txt +unspeakably pg3200.txt +unspeakably!" pg3200.txt +unspoiled. pg3200.txt +unspoke pg100.txt +unspoken pg3200.txt, pg100.txt +unspoken, pg3200.txt +unsquar'd, pg100.txt +unstable pg3200.txt +unstable, pg3200.txt +unstained pg3200.txt +unstanched pg100.txt +unstate pg100.txt +unsteadiness, pg31100.txt +unsteady pg31100.txt, pg3200.txt +unsteady, pg3200.txt +unsteady; pg31100.txt +unstinted pg3200.txt +unstudied pg31100.txt, pg3200.txt +unsuccessful pg3200.txt +unsuccessful, pg31100.txt, pg3200.txt +unsuitable." pg31100.txt +unsuited pg31100.txt +unsullied, pg3200.txt +unsung pg3200.txt +unsupplied.... pg3200.txt +unsupported pg3200.txt +unsure pg3200.txt, pg100.txt +unsure. pg100.txt +unsuspected pg31100.txt, pg3200.txt +unsuspected, pg31100.txt +unsuspecting pg3200.txt +unsuspecting, pg3200.txt +unsuspecting; pg3200.txt +unsuspectingly!)--until pg3200.txt +unsuspicious pg31100.txt +unsuspiciously: pg3200.txt +unsway'd? pg100.txt +unswear. pg100.txt +unswept, pg100.txt +unsworn; pg100.txt +unsympathetic; pg3200.txt +untainted pg31100.txt +untainted? pg100.txt +untaken, pg3200.txt +untangle pg3200.txt +untangled pg3200.txt +untarnished. pg3200.txt +untaught pg3200.txt +untaught, pg100.txt +unteachable pg3200.txt +untenable. pg3200.txt +untenable.] pg3200.txt +untender'd. pg100.txt +untender? pg100.txt +unterrified; pg3200.txt +unthankful pg3200.txt +unthankfulness! pg100.txt +unthinkable. pg3200.txt +unthinking pg3200.txt +unthinkingly pg3200.txt +unthrift pg100.txt +untidiness pg3200.txt +untie. pg100.txt +untied, pg3200.txt +until pg31100.txt, pg3200.txt, pg100.txt +until--' pg3200.txt +untir'd, pg100.txt +untitled pg3200.txt +unto pg3200.txt, pg100.txt +unto't pg100.txt +unto, pg100.txt +unto. pg100.txt +unto: pg100.txt +unto? pg100.txt +untold pg3200.txt +untold, pg100.txt +untold. pg3200.txt +untouch'd. pg100.txt +untouched pg3200.txt +untouched; pg3200.txt +untoward pg31100.txt +untrained pg3200.txt +untrained, pg3200.txt +untrammelled pg3200.txt +untried pg100.txt +untrimmed: pg100.txt +untroubled pg3200.txt +untroubled. pg3200.txt +untrue pg100.txt +untrue, pg3200.txt, pg100.txt +untrue. pg100.txt +untrue? pg100.txt +untruth, pg3200.txt +untruths!" pg3200.txt +untuneable. pg100.txt +untwine pg100.txt +untying pg3200.txt +unused pg31100.txt +unusual pg31100.txt, pg3200.txt +unusual--" pg3200.txt +unusual. pg3200.txt +unusual." pg31100.txt +unusual; pg3200.txt +unusual?" pg3200.txt +unusually pg3200.txt +unutterable, pg31100.txt +unvanquish'd, pg100.txt +unveiling pg3200.txt +unwarily pg100.txt +unwatchfulness, pg3200.txt +unwed. pg100.txt +unwelcome pg31100.txt, pg3200.txt +unwelcome, pg31100.txt +unwelcome. pg3200.txt, pg100.txt +unwelcome." pg31100.txt +unwelcome; pg31100.txt +unwell, pg31100.txt +unwell. pg3200.txt +unwell." pg3200.txt +unwell?" pg31100.txt +unwept! pg100.txt +unwhitewashed pg3200.txt +unwholesome pg31100.txt, pg100.txt +unwholesome." pg31100.txt +unwilling pg31100.txt, pg3200.txt +unwilling, pg3200.txt, pg100.txt +unwilling. pg100.txt +unwillingly pg100.txt +unwillingly, pg31100.txt +unwillingly. pg31100.txt, pg3200.txt, pg100.txt +unwillingness pg31100.txt +unwillingness, pg31100.txt, pg3200.txt +unwind pg3200.txt +unwinding pg3200.txt +unwise pg3200.txt +unwise, pg3200.txt +unwittingly? 
pg100.txt +unwonted pg100.txt +unworn pg3200.txt +unworthily pg100.txt +unworthily, pg31100.txt +unworthiness pg3200.txt +unworthiness. pg3200.txt +unworthy pg31100.txt, pg3200.txt, pg100.txt +unworthy. pg3200.txt +unwounded, pg31100.txt +unwritten pg3200.txt +unwrung. pg100.txt +unyielding pg31100.txt +unyoke. pg100.txt +up! pg3200.txt, pg100.txt +up!" pg3200.txt +up" pg3200.txt +up'; pg100.txt +up'ards, pg3200.txt +up), pg3200.txt +up, pg31100.txt, pg3200.txt, pg100.txt +up,' pg3200.txt +up,-- pg3200.txt +up- pg100.txt +up-- pg3200.txt +up--according pg3200.txt +up--and pg3200.txt +up--as pg3200.txt +up--but pg3200.txt +up--finish pg3200.txt +up--lands pg3200.txt +up--oh pg3200.txt +up--one pg31100.txt +up--steady, pg3200.txt +up--the pg3200.txt +up--they pg3200.txt +up-stairs pg3200.txt +up-stairs. pg3200.txt +up-stairs?" pg3200.txt +up-stream, pg3200.txt +up. pg31100.txt, pg3200.txt, pg100.txt +up." pg31100.txt, pg3200.txt +up."] pg3200.txt +up.' pg3200.txt, pg100.txt +up.'" pg3200.txt +up.) pg3200.txt +up..... pg3200.txt +up: pg3200.txt, pg100.txt +up; pg31100.txt, pg3200.txt, pg100.txt +up? pg3200.txt, pg100.txt +up?" pg3200.txt +up?' pg3200.txt +up] pg100.txt +upbraid pg31100.txt, pg3200.txt, pg100.txt +upbraided pg3200.txt +upbraidings: pg100.txt +upbraids pg100.txt +upcountry, pg3200.txt +update)] pg3200.txt +upheaval pg3200.txt +upheavals pg3200.txt +upheaved pg3200.txt +uphill--and pg3200.txt +uphill. pg3200.txt +uphold pg100.txt +uphold, pg100.txt +upholding pg3200.txt +upholds, pg100.txt +upholster pg3200.txt +upholstered pg3200.txt +upholsteries pg3200.txt +upland pg3200.txt +uplift pg3200.txt +uplift. pg3200.txt +uplift; pg100.txt +uplifted pg3200.txt +uplifted; pg3200.txt +uplifting pg3200.txt +upliftings pg3200.txt +upon! pg100.txt +upon's. pg100.txt +upon't pg100.txt +upon't! pg100.txt +upon't, pg100.txt +upon't. pg100.txt +upon't; pg100.txt +upon, pg31100.txt, pg3200.txt, pg100.txt +upon- pg100.txt +upon--" pg3200.txt +upon--the pg3200.txt +upon. pg31100.txt, pg3200.txt, pg100.txt +upon." pg3200.txt +upon; pg31100.txt, pg3200.txt, pg100.txt +upon? pg100.txt +upper pg31100.txt, pg3200.txt +upper, pg3200.txt +upper-class pg3200.txt +upper-deck pg3200.txt +uppercross pg31100.txt +uppercross, pg31100.txt +uppercross. pg31100.txt +uppercross." pg31100.txt +uppercross: pg31100.txt +uppercross; pg31100.txt +uppermost pg3200.txt +uppish. pg3200.txt +uprear, pg100.txt +upright pg31100.txt, pg3200.txt, pg100.txt +upright, pg3200.txt, pg100.txt +upright. pg100.txt +uprightness, pg31100.txt +uprightness; pg31100.txt +uprise? pg100.txt +uprising pg3200.txt +uprising, pg3200.txt +uproar pg3200.txt +uproar, pg100.txt +uproar. pg31100.txt +uproariously pg3200.txt +uprooting pg3200.txt +ups pg3200.txt +upset, pg3200.txt +upshot pg3200.txt +upside pg3200.txt, pg100.txt +upson. pg3200.txt +upstairs pg31100.txt, pg3200.txt, pg100.txt +upstairs, pg31100.txt +upstairs. pg31100.txt, pg3200.txt +upstairs." pg31100.txt +upstart!" pg3200.txt +upstart, pg31100.txt +upstart." pg31100.txt +upstream, pg3200.txt +upton's pg3200.txt +upturned pg3200.txt +upward pg3200.txt, pg100.txt +upward, pg3200.txt +upward--away pg3200.txt +upward. pg3200.txt, pg100.txt +upward; pg100.txt +upward] pg3200.txt +upwards pg3200.txt +upwards. pg100.txt +urbanity pg3200.txt +urchin pg3200.txt +urchinfield, pg100.txt +urchins pg100.txt +urchins, pg100.txt +urg'd pg100.txt +urg'd! 
pg100.txt +urg'd: pg100.txt +urg'd; pg100.txt +urge pg31100.txt, pg3200.txt, pg100.txt +urge, pg100.txt +urged pg31100.txt, pg3200.txt +urged, pg31100.txt +urgency pg31100.txt +urgent pg31100.txt, pg3200.txt +urgent, pg3200.txt +urgent. pg3200.txt +urgently." pg3200.txt +urges pg3200.txt +urges. pg100.txt +urging pg31100.txt, pg3200.txt +uriah's pg3200.txt +uriens pg3200.txt +uriens; pg3200.txt +urn pg3200.txt +urn, pg31100.txt, pg100.txt +urns, pg100.txt +ursley pg100.txt +ursula pg3200.txt, pg100.txt +ursula, pg3200.txt +ursula. pg100.txt +ursula]. pg100.txt +urswick pg100.txt +us! pg3200.txt, pg100.txt +us!" pg31100.txt, pg3200.txt, pg100.txt +us!' pg3200.txt +us!- pg100.txt +us!--clear, pg3200.txt +us!--we pg31100.txt +us!_ pg3200.txt +us'd pg100.txt +us'd, pg100.txt +us'd. pg100.txt +us'd? pg100.txt +us, pg31100.txt, pg3200.txt, pg100.txt +us,--these pg3200.txt +us- pg100.txt +us-- pg3200.txt +us--" pg3200.txt +us--'twas pg3200.txt +us--and pg3200.txt +us--but pg3200.txt +us--glucklich, pg3200.txt +us--if pg3200.txt +us--perhaps pg3200.txt +us--stacked pg3200.txt +us--surrounded pg3200.txt +us--take pg31100.txt +us--that pg31100.txt, pg3200.txt +us--we pg3200.txt +us. pg31100.txt, pg3200.txt, pg100.txt +us." pg31100.txt, pg3200.txt +us.' pg3200.txt +us.] pg3200.txt +us: pg3200.txt, pg100.txt +us; pg31100.txt, pg3200.txt, pg100.txt +us? pg3200.txt, pg100.txt +us?" pg31100.txt, pg3200.txt +us?". pg3200.txt +usable pg3200.txt +usable. pg3200.txt +usage pg3200.txt +usage, pg100.txt +usage. pg31100.txt, pg100.txt +usage." pg3200.txt +usage; pg100.txt +usages pg3200.txt +usances; pg100.txt +use pg31100.txt, pg3200.txt, pg100.txt +use! pg3200.txt, pg100.txt +use, pg31100.txt, pg3200.txt, pg100.txt +use- pg100.txt +use--he pg3200.txt +use. pg31100.txt, pg3200.txt, pg100.txt +use." pg31100.txt, pg3200.txt +use: pg100.txt +use; pg3200.txt, pg100.txt +use? pg3200.txt, pg100.txt +use?" pg31100.txt +useable pg3200.txt +used pg31100.txt, pg3200.txt, pg100.txt +used). pg3200.txt +used, pg31100.txt, pg3200.txt, pg100.txt +used--supposing pg3200.txt +used. pg3200.txt, pg100.txt +used: pg31100.txt +used; pg31100.txt +useful pg31100.txt, pg3200.txt +useful, pg31100.txt +useful--i pg31100.txt +useful. pg3200.txt +useful." pg31100.txt +useful?" pg3200.txt +usefulness pg3200.txt +usefulness, pg31100.txt, pg3200.txt +usefulness. pg3200.txt +usefulness? pg3200.txt +useless pg31100.txt, pg3200.txt +useless, pg31100.txt, pg3200.txt +useless. pg3200.txt +useless." pg3200.txt +useless; pg3200.txt +useless?--had pg31100.txt +uselessly--to pg3200.txt +uses pg3200.txt, pg100.txt +uses, pg3200.txt +uses. pg3200.txt, pg100.txt +uses; pg100.txt +usest, pg100.txt +useth pg100.txt +usher pg100.txt +usher, pg100.txt +ushered pg31100.txt, pg3200.txt +ushering, pg100.txt +ushers pg3200.txt +using pg31100.txt, pg3200.txt +usk, pg31100.txt +uske. pg31100.txt +uske." pg31100.txt +usual pg31100.txt, pg3200.txt +usual!" pg3200.txt +usual): pg3200.txt +usual, pg31100.txt, pg3200.txt +usual-- pg3200.txt +usual--a pg31100.txt +usual. pg31100.txt, pg3200.txt +usual.--you pg31100.txt +usual: pg3200.txt +usual; pg3200.txt +usual?" pg3200.txt +usually pg31100.txt, pg3200.txt +usually, pg3200.txt +usually- pg100.txt +usufruct?" pg3200.txt +usurer! pg100.txt +usurer; pg100.txt +usurers' pg100.txt +usurers? pg100.txt +usuries, pg100.txt +usurp pg3200.txt, pg100.txt +usurp'd, pg100.txt +usurp'd. pg100.txt +usurp'st, pg100.txt +usurp. pg100.txt +usurp; pg100.txt +usurpation pg3200.txt, pg100.txt +usurpation, pg3200.txt +usurpation. 
pg3200.txt, pg100.txt +usurper, pg3200.txt +usurper. pg3200.txt, pg100.txt +usurping pg100.txt +usurps, pg100.txt +usury pg100.txt +usury, pg100.txt +usury. pg3200.txt +ut." pg3200.txt +ut?" pg3200.txt +utah pg3200.txt +utah. pg3200.txt +utility pg31100.txt +utility. pg31100.txt, pg100.txt +utilize pg3200.txt +utmost pg31100.txt, pg3200.txt, pg100.txt +utmost, pg31100.txt, pg3200.txt +utmost--she pg3200.txt +utmost. pg31100.txt, pg3200.txt +utmost; pg31100.txt +utt'red. pg100.txt +utter pg31100.txt, pg3200.txt, pg100.txt +utter'd pg100.txt +utter, pg31100.txt, pg3200.txt +utter. pg3200.txt, pg100.txt +utter: pg100.txt +utter? pg100.txt +utterance pg3200.txt +utterance. pg3200.txt, pg100.txt +utterance: pg3200.txt +utterances, pg3200.txt +uttered pg31100.txt, pg3200.txt, pg100.txt +uttered!" pg3200.txt +uttered, pg31100.txt, pg3200.txt +uttered--'tis pg3200.txt +uttered. pg3200.txt, pg100.txt +uttered: pg3200.txt +uttered; pg100.txt +utterer's pg3200.txt +utterers. pg3200.txt +uttering pg3200.txt +utterly pg31100.txt, pg3200.txt, pg100.txt +utterly, pg3200.txt, pg100.txt +utterly. pg3200.txt, pg100.txt +utterly; pg3200.txt +uttermost pg3200.txt, pg100.txt +uttermost, pg100.txt +uttermost. pg100.txt +utters! pg100.txt +uv pg3200.txt +uwaine pg3200.txt +v's--and pg3200.txt +v, pg3200.txt +v-shaped pg3200.txt +v. pg31100.txt, pg3200.txt, pg100.txt +v.12.08.93] pg100.txt +v.c., pg3200.txt +va, pg3200.txt +vacancies pg3200.txt +vacancies. pg3200.txt +vacancy pg3200.txt +vacancy, pg3200.txt, pg100.txt +vacancy. pg3200.txt +vacant pg3200.txt +vacant, pg31100.txt, pg3200.txt +vacant,'--his pg3200.txt +vacant. pg31100.txt, pg3200.txt +vacant? pg3200.txt +vacantly pg3200.txt +vacate. pg3200.txt +vacated. pg3200.txt +vacation, pg3200.txt +vacation. pg3200.txt +vacation." pg3200.txt +vaccinated pg3200.txt +vacuum pg3200.txt +vacuum. pg3200.txt +vagabond pg3200.txt +vagabond!" pg3200.txt +vagabond' pg3200.txt +vagabond; pg3200.txt +vagabonds pg3200.txt +vagabonds, pg3200.txt +vagabonds. pg3200.txt +vagaries pg3200.txt +vagrant pg3200.txt +vague pg3200.txt +vague, pg3200.txt +vague--because pg3200.txt +vaguely pg3200.txt +vaguely." pg3200.txt +vagueness pg3200.txt +vail pg3200.txt +vaillants; pg3200.txt +vain pg31100.txt, pg3200.txt, pg100.txt +vain, pg31100.txt, pg3200.txt, pg100.txt +vain-- pg3200.txt +vain-glories? pg100.txt +vain-glory pg100.txt +vain-glory, pg100.txt +vain. pg31100.txt, pg3200.txt, pg100.txt +vain." pg31100.txt +vain.- pg100.txt +vain; pg31100.txt, pg3200.txt, pg100.txt +vais pg100.txt +vakeel pg3200.txt +valais, pg3200.txt +valancourt pg31100.txt +vale pg3200.txt +vale, pg3200.txt, pg100.txt +vale." pg3200.txt +valedictory--conclusion pg3200.txt +valeer pg3200.txt +valeer, pg3200.txt +valeer-- pg3200.txt +valeer. pg3200.txt +valence, pg100.txt +valentine pg100.txt +valentine! pg100.txt +valentine, pg100.txt +valentine. pg100.txt +valentine; pg100.txt +valentine? pg100.txt +valeria pg100.txt +valeria! pg100.txt +valeria, pg100.txt +valerian! pg3200.txt +valerian, pg3200.txt +vales, pg100.txt +valet." pg3200.txt +valey. pg3200.txt +valiant pg100.txt +valiant, pg3200.txt, pg100.txt +valiant-young, pg100.txt +valiant. pg100.txt +valiant; pg100.txt +valiant? pg100.txt +valiantly! pg100.txt +validity pg100.txt +validity, pg100.txt +validity; pg100.txt +valise pg3200.txt +valise. pg3200.txt +valkenburgh, pg3200.txt +vallant pg100.txt +valley pg31100.txt, pg3200.txt +valley! pg3200.txt +valley, pg31100.txt, pg3200.txt, pg100.txt +valley. pg3200.txt +valley." 
pg3200.txt +valley; pg3200.txt +valleys pg3200.txt +valleys, pg3200.txt +valleys--and pg3200.txt +valleys. pg3200.txt, pg100.txt +valor pg3200.txt, pg100.txt +valor, pg100.txt +valor. pg3200.txt +valorous pg3200.txt, pg100.txt +valour pg100.txt +valour! pg100.txt +valour, pg100.txt +valour. pg100.txt +valour; pg100.txt +valour? pg100.txt +valuable pg31100.txt, pg3200.txt +valuable! pg3200.txt +valuable, pg3200.txt +valuable. pg31100.txt, pg3200.txt +valuable." pg3200.txt +valuable; pg31100.txt, pg3200.txt +valuable? pg3200.txt +valuable?" pg3200.txt +valuable?' pg3200.txt +valuables pg3200.txt +valuation pg100.txt +valuation; pg3200.txt +value pg31100.txt, pg3200.txt, pg100.txt +value!" pg31100.txt +value, pg3200.txt +value--among pg3200.txt +value--none pg3200.txt +value. pg31100.txt, pg3200.txt, pg100.txt +value." pg31100.txt, pg3200.txt +value; pg3200.txt +value? pg3200.txt, pg100.txt +valued pg31100.txt, pg3200.txt +valued, pg100.txt +valued? pg100.txt +valueless pg3200.txt +valueless. pg31100.txt, pg3200.txt +valueless: pg3200.txt +values pg3200.txt, pg100.txt +values, pg31100.txt, pg3200.txt +values--then pg3200.txt +values. pg3200.txt +values; pg3200.txt +vampire pg3200.txt +vampire, pg3200.txt +van pg3200.txt +van, pg3200.txt +van- pg3200.txt +vanderbilt pg3200.txt +vanderbilt----" pg3200.txt +vanish pg3200.txt, pg100.txt +vanish'd. pg100.txt +vanish'd? pg100.txt +vanish, pg100.txt +vanish. pg100.txt +vanish." pg31100.txt +vanish] pg100.txt +vanished pg3200.txt +vanished, pg31100.txt, pg3200.txt +vanished. pg3200.txt, pg100.txt +vanished; pg3200.txt +vanishes pg3200.txt, pg100.txt +vanishes. pg3200.txt +vanishest. pg100.txt +vanishing pg3200.txt +vanities pg31100.txt, pg3200.txt +vanities! pg100.txt +vanities. pg100.txt +vanity pg31100.txt, pg3200.txt, pg100.txt +vanity! pg100.txt +vanity, pg31100.txt, pg3200.txt, pg100.txt +vanity- pg100.txt +vanity. pg3200.txt, pg100.txt +vanity: pg100.txt +vanity; pg3200.txt, pg100.txt +vanquish pg3200.txt +vanquish'd, pg100.txt +vanquished pg3200.txt +vanquished, pg100.txt +vanquished. pg100.txt +vanquisher. pg100.txt +vanquishing pg3200.txt +vant, pg100.txt +vantage, pg100.txt +vantage- pg100.txt +vantage-ground, pg3200.txt +vantage. pg100.txt +vantages pg100.txt +vantages. pg100.txt +vantages; pg100.txt +vapor pg3200.txt +vapor, pg3200.txt +vapor. pg3200.txt +vaporing. pg3200.txt +vaporized pg3200.txt +vaporous pg3200.txt +vapory pg3200.txt +vapour. pg100.txt +vapours. pg100.txt +variable, pg100.txt +variable. pg3200.txt, pg100.txt +variable." pg3200.txt +variance. pg31100.txt +variation pg31100.txt, pg3200.txt +variation. pg3200.txt +variations pg3200.txt +variations,) pg3200.txt +variations. pg100.txt +varied pg31100.txt, pg3200.txt +varied. pg31100.txt +variegated pg3200.txt +varies pg3200.txt +varies; pg3200.txt +varieties pg31100.txt +varieties, pg31100.txt +varieties--" pg3200.txt +varieties--varieties pg3200.txt +variety pg31100.txt, pg3200.txt +variety, pg31100.txt, pg3200.txt +variety. pg3200.txt +various pg31100.txt, pg3200.txt +varlet! pg100.txt +varlet, pg100.txt +varlet. pg100.txt +varlet; pg100.txt +varletry pg100.txt +varlets, pg3200.txt +varlets." pg3200.txt +varlets? pg100.txt +varletto. pg100.txt +varnish; pg3200.txt +varnished pg3200.txt +varrius pg100.txt +varrius! pg100.txt +varro pg100.txt +varro. pg100.txt +vary pg31100.txt +varying pg31100.txt, pg3200.txt +vassal pg3200.txt +vassal, pg100.txt +vassal,' pg100.txt +vassal. 
pg100.txt +vassalage pg100.txt +vassals pg3200.txt +vassals; pg3200.txt +vast pg31100.txt, pg3200.txt +vast, pg3200.txt +vastly pg3200.txt +vastness pg3200.txt +vastnesses pg3200.txt +vatican, pg3200.txt +vatican; pg3200.txt +vats pg3200.txt +vaucouleurs pg3200.txt +vaucouleurs, pg3200.txt +vaudemont, pg100.txt +vaughan pg100.txt +vaughan, pg100.txt +vault pg3200.txt, pg100.txt +vault, pg100.txt +vault. pg100.txt +vault; pg100.txt +vaulting, pg3200.txt +vaults pg3200.txt +vaults, pg3200.txt +vaults." pg3200.txt +vaults?" pg3200.txt +vaunted pg31100.txt +vaux pg100.txt +vaux, pg100.txt +vaward pg100.txt +vaward. pg100.txt +vecchia pg3200.txt +vecchia. pg3200.txt +vecchio pg3200.txt +vegetable pg3200.txt +vegetables pg3200.txt +vegetables--a pg3200.txt +vegetables?), pg3200.txt +vegetation pg3200.txt +vegetation, pg3200.txt +vegetation. pg3200.txt +vehemence pg31100.txt +vehemency pg100.txt +vehement pg3200.txt +vehement; pg100.txt +vehicle pg3200.txt +vehicle, pg3200.txt +vehicle. pg3200.txt +vehicles pg3200.txt +vehicles, pg3200.txt +vehicles. pg3200.txt +veicolo. pg3200.txt +veil pg3200.txt +veil, pg3200.txt, pg100.txt +veil. pg3200.txt +veil." pg31100.txt +veiled pg3200.txt, pg100.txt +veiling pg3200.txt +veils pg3200.txt +veils; pg31100.txt, pg3200.txt +vein pg3200.txt +vein! pg100.txt +vein, pg3200.txt, pg100.txt +vein--looked pg3200.txt +vein. pg3200.txt, pg100.txt +vein." pg3200.txt +vein: pg3200.txt +vein; pg100.txt +veins pg3200.txt, pg100.txt +veins! pg100.txt +veins!" pg3200.txt +veins, pg100.txt +veins--and pg3200.txt +veins. pg100.txt +veins; pg3200.txt, pg100.txt +vell? pg100.txt +velocipede. pg3200.txt +velutus pg100.txt +velvet pg3200.txt, pg100.txt +velvet, pg3200.txt +velvet. pg100.txt +velvets pg3200.txt +velvets. pg3200.txt +velvety pg3200.txt +vendible. pg100.txt +vendue pg3200.txt +veneered pg3200.txt +venerable pg31100.txt, pg3200.txt, pg100.txt +venerable--for pg3200.txt +venerable. pg3200.txt +venerating pg31100.txt +veneration pg3200.txt +veneration, pg31100.txt +veneration--look pg3200.txt +venetia, pg100.txt +venetian pg3200.txt, pg100.txt +venetians pg3200.txt +venetians, pg100.txt +venette pg3200.txt +venge pg100.txt +vengeance pg3200.txt, pg100.txt +vengeance! pg3200.txt, pg100.txt +vengeance, pg100.txt +vengeance- pg100.txt +vengeance-prompted pg3200.txt +vengeance. pg3200.txt, pg100.txt +vengeful pg3200.txt +venice pg3200.txt, pg100.txt +venice, pg3200.txt, pg100.txt +venice. pg3200.txt, pg100.txt +venice: pg100.txt +venice; pg100.txt +venice? pg3200.txt, pg100.txt +venison! pg100.txt +venison? pg100.txt +venit? pg100.txt +venom pg3200.txt +venom, pg100.txt +venom-mouth'd, pg100.txt +venom. pg3200.txt +venomous, pg100.txt +vent pg31100.txt, pg3200.txt, pg100.txt +vent. pg100.txt +vent; pg100.txt +ventages pg100.txt +ventidius pg100.txt +ventidius, pg100.txt +ventidius. pg100.txt +ventilation pg3200.txt +ventilators, pg3200.txt +vents pg100.txt +vents. pg100.txt +ventur'd pg100.txt +ventur'd, pg100.txt +ventur'd. pg100.txt +venture pg31100.txt, pg3200.txt, pg100.txt +venture!" pg3200.txt +venture, pg31100.txt, pg3200.txt +venture. pg3200.txt, pg100.txt +venture: pg3200.txt +venture? pg100.txt +ventured pg31100.txt, pg3200.txt +ventures pg3200.txt, pg100.txt +ventures, pg3200.txt +ventures. pg100.txt +venturing pg31100.txt, pg3200.txt +venturing. pg100.txt +venturous pg100.txt +venturous; pg100.txt +venus pg3200.txt, pg100.txt +venus!" pg3200.txt +venus, pg3200.txt, pg100.txt +venus. pg3200.txt +venus." 
pg3200.txt +venuto, pg100.txt +veracity pg3200.txt +veracity. pg31100.txt, pg3200.txt +veranda pg3200.txt +veranda, pg31100.txt +verandah pg3200.txt +verandas pg3200.txt +verandas, pg3200.txt +verb pg3200.txt +verb, pg3200.txt, pg100.txt +verbal pg3200.txt +verbal." pg3200.txt +verbal?" pg3200.txt +verbally, pg3200.txt +verbatim pg3200.txt +verdant pg3200.txt +verde pg3200.txt +verdict pg3200.txt, pg100.txt +verdict, pg3200.txt +verdict-- pg3200.txt +verdict. pg3200.txt +verdict.) pg3200.txt +verdict: pg3200.txt +verdict; pg3200.txt +verdict? pg100.txt +verdict?" pg3200.txt +verdicts pg31100.txt +verdicts--striking pg3200.txt +verdigrease pg3200.txt +verdigrease?" pg3200.txt +verdure, pg31100.txt, pg3200.txt +verdure-clad pg3200.txt +vere pg100.txt +vere, pg100.txt +vergangenheit--no, pg3200.txt +verge pg3200.txt, pg100.txt +verge, pg100.txt +verge-staff pg3200.txt +verge. pg3200.txt, pg100.txt +verges. pg100.txt +verges]. pg100.txt +verhungre! pg3200.txt +verier pg100.txt +verification pg3200.txt +verified pg3200.txt, pg100.txt +verified, pg100.txt +verified; pg100.txt +verify pg3200.txt, pg100.txt +verily pg31100.txt, pg3200.txt, pg100.txt +verily! pg100.txt +verily, pg3200.txt, pg100.txt +verily. pg100.txt +verity pg3200.txt, pg100.txt +verity, pg100.txt +verity. pg100.txt +vermaehlt: pg3200.txt +vermicelli, pg3200.txt +vermifuge." pg3200.txt +vermin pg3200.txt +vermin--but pg3200.txt +vermin-tortured pg3200.txt +vermin. pg3200.txt, pg100.txt +vernal pg3200.txt +vernon pg31100.txt, pg100.txt +vernon, pg31100.txt +vernon. pg31100.txt, pg100.txt +vernon; pg31100.txt +vernon] pg100.txt +vernon]. pg100.txt +verona pg100.txt +verona. pg100.txt +verona? pg100.txt +versailles pg3200.txt +versailles. pg3200.txt +versatile pg3200.txt +versatility pg3200.txt +verse pg3200.txt, pg100.txt +verse, pg100.txt +verse. pg3200.txt, pg100.txt +verse: pg3200.txt +versen's, pg3200.txt +verses pg31100.txt, pg3200.txt +verses, pg31100.txt +verses. pg100.txt +verses: pg3200.txt +verses? pg100.txt +versifiers pg3200.txt +version pg31100.txt, pg3200.txt, pg100.txt +version, pg3200.txt +version.' pg3200.txt +version: pg3200.txt +vertebrate. pg3200.txt +vertebry. pg3200.txt +vertical pg3200.txt +vertu, pg3200.txt +verus. pg3200.txt +very! pg3200.txt +very, pg3200.txt +very--an pg31100.txt +very. pg3200.txt +very." pg31100.txt, pg3200.txt +vespers?" pg3200.txt +vessel pg3200.txt, pg100.txt +vessel, pg3200.txt, pg100.txt +vessel. pg3200.txt, pg100.txt +vessels pg3200.txt +vessels, pg3200.txt, pg100.txt +vessels. pg3200.txt +vessels." pg3200.txt +vest pg3200.txt +vest!" pg3200.txt +vest-pocket; pg3200.txt +vest. pg3200.txt +vested pg3200.txt +vestibule pg31100.txt +vestibule. pg3200.txt +vestibuled pg3200.txt +vestige pg3200.txt +vestiges pg3200.txt +vestments?" pg3200.txt +vestrymen?" pg3200.txt +vests, pg3200.txt +vests; pg3200.txt +veststrap. pg3200.txt +vesuvian pg3200.txt +vesuvius pg3200.txt +vesuvius--a pg3200.txt +vesuvius--continued. pg3200.txt +vesuvius. pg3200.txt +veteran pg3200.txt +veteran, pg3200.txt +veteran. pg3200.txt +veterans pg3200.txt +veterans? pg3200.txt +veto pg3200.txt +veto, pg3200.txt +vetoes pg3200.txt +vex pg31100.txt, pg3200.txt +vex'd pg100.txt +vex'd. pg100.txt +vex'd; pg100.txt +vex'd? pg100.txt +vexation pg31100.txt +vexation). pg3200.txt +vexation, pg31100.txt, pg3200.txt +vexation-- pg3200.txt +vexation. pg31100.txt, pg3200.txt, pg100.txt +vexation." 
pg31100.txt +vexations pg31100.txt, pg100.txt +vexations, pg3200.txt +vexatious pg31100.txt +vexed pg31100.txt, pg3200.txt, pg100.txt +vexed, pg31100.txt, pg3200.txt +vexed. pg31100.txt +vexes pg3200.txt +vi pg31100.txt, pg3200.txt, pg100.txt +vi--" pg3200.txt +vi. pg3200.txt, pg100.txt +vi.). pg3200.txt +via! pg100.txt +vial pg3200.txt +vial, pg100.txt +vial. pg100.txt +vial." pg3200.txt +viands pg3200.txt, pg100.txt +vicar pg31100.txt, pg100.txt +vicar. pg100.txt +vicarage pg31100.txt +vicarage, pg31100.txt +vicarage." pg31100.txt +vice pg3200.txt, pg100.txt +vice! pg100.txt +vice, pg31100.txt, pg3200.txt, pg100.txt +vice-inquisitor, pg3200.txt +vice-president pg3200.txt +vice-president, pg3200.txt +vice. pg100.txt +vice; pg100.txt +vice? pg100.txt +vicentio! pg100.txt +vicentio. pg100.txt +viceroyalties. pg3200.txt +viceroys, pg3200.txt +vices pg3200.txt, pg100.txt +vices! pg100.txt +vices, pg31100.txt, pg3200.txt, pg100.txt +vices.' pg100.txt +vicinity pg31100.txt, pg3200.txt +vicinity, pg3200.txt +vicinity. pg3200.txt +vicious pg3200.txt, pg100.txt +vicious, pg3200.txt +vicissitudes pg3200.txt +vicksburg. pg3200.txt +vicksburg? pg3200.txt +vicksburger pg3200.txt +victim pg31100.txt, pg3200.txt +victim, pg3200.txt +victim. pg3200.txt +victim; pg3200.txt +victimizer. pg3200.txt +victims pg3200.txt, pg100.txt +victims. pg3200.txt +victims." pg3200.txt +victor. pg3200.txt +victoria pg3200.txt +victoria'; pg3200.txt +victoria's pg3200.txt +victoria, pg3200.txt +victoria--a pg3200.txt +victoria. pg3200.txt +victories pg3200.txt, pg100.txt +victories, pg100.txt +victorious pg3200.txt +victorious!" pg3200.txt +victorious, pg3200.txt +victors pg3200.txt +victory pg3200.txt, pg100.txt +victory! pg100.txt +victory!" pg3200.txt +victory, pg31100.txt, pg3200.txt, pg100.txt +victory. pg3200.txt, pg100.txt +victory; pg3200.txt +victuals, pg100.txt +victuals; pg100.txt +videlicet:- pg100.txt +vides? pg100.txt +vie, pg100.txt +vielleicht--umsteigen? pg3200.txt +vienna pg3200.txt, pg100.txt +vienna, pg3200.txt, pg100.txt +vienna. pg3200.txt, pg100.txt +vienna." pg3200.txt +view pg31100.txt, pg3200.txt, pg100.txt +view! pg100.txt +view'd pg3200.txt +view'd, pg100.txt +view'd. pg100.txt +view), pg3200.txt +view, pg31100.txt, pg3200.txt, pg100.txt +view," pg3200.txt +view--and pg3200.txt +view--at pg31100.txt +view--but pg3200.txt +view--with pg3200.txt +view. pg31100.txt, pg3200.txt, pg100.txt +view." pg31100.txt, pg3200.txt +view; pg31100.txt, pg3200.txt, pg100.txt +viewed pg31100.txt, pg3200.txt +viewed, pg31100.txt, pg3200.txt, pg100.txt +viewed. pg3200.txt +viewest, pg100.txt +viewing pg31100.txt, pg3200.txt +views pg31100.txt, pg3200.txt +views! pg31100.txt, pg100.txt +views, pg31100.txt, pg3200.txt +views--trip pg3200.txt +views. pg31100.txt, pg3200.txt +views." pg3200.txt +views; pg31100.txt +views? pg31100.txt +vigil pg3200.txt +vigilance pg3200.txt, pg100.txt +vigilance, pg100.txt +vigilance. pg3200.txt +vigilant pg3200.txt +vigilant. pg100.txt +vigilante; pg3200.txt +vigilantes pg3200.txt +vigilantly." pg3200.txt +vigor pg3200.txt +vigor, pg3200.txt +vigor. pg3200.txt +vigor." pg3200.txt +vigorous pg3200.txt +vigorous, pg3200.txt +vigorous. pg3200.txt +vigorously pg3200.txt +vigorously, pg3200.txt +vigour pg31100.txt +vigour, pg100.txt +vii pg31100.txt, pg3200.txt, pg100.txt +vii. pg3200.txt, pg100.txt +viii pg31100.txt, pg3200.txt +viii. pg3200.txt, pg100.txt +viii., pg3200.txt +vile pg31100.txt, pg3200.txt, pg100.txt +vile! pg100.txt +vile, pg3200.txt, pg100.txt +vile- pg100.txt +vile." 
pg3200.txt +vile; pg100.txt +vilely pg100.txt +vilest, pg3200.txt +vilified, pg3200.txt +vill pg100.txt +villa pg3200.txt +village pg31100.txt, pg3200.txt +village), pg3200.txt +village, pg31100.txt, pg3200.txt, pg100.txt +village-cock pg100.txt +village. pg31100.txt, pg3200.txt +village.] pg3200.txt +village; pg31100.txt, pg3200.txt, pg100.txt +villager pg3200.txt, pg100.txt +villager, pg3200.txt +villagers pg3200.txt +villagers, pg3200.txt +villagers." pg3200.txt +villagery, pg100.txt +villages pg3200.txt +villages, pg3200.txt, pg100.txt +villages--where pg3200.txt +villages. pg3200.txt +villain pg3200.txt, pg100.txt +villain! pg3200.txt, pg100.txt +villain's pg100.txt +villain, pg100.txt +villain. pg3200.txt, pg100.txt +villain.' pg100.txt +villain.-- pg31100.txt +villain; pg100.txt +villain? pg100.txt +villain?' pg100.txt +villainies pg100.txt +villainies, pg100.txt +villainies. pg100.txt +villainous pg31100.txt, pg3200.txt +villains pg100.txt +villains! pg100.txt +villains, pg100.txt +villains. pg3200.txt, pg100.txt +villainy pg3200.txt, pg100.txt +villainy! pg100.txt +villainy!" pg3200.txt +villainy, pg3200.txt, pg100.txt +villainy. pg31100.txt, pg100.txt +villainy: pg100.txt +villainy; pg100.txt +villainy? pg100.txt +villanies, pg100.txt +villanies. pg100.txt +villanous pg3200.txt, pg100.txt +villany pg100.txt +villany! pg100.txt +villany. pg3200.txt, pg100.txt +villany? pg100.txt +villas pg3200.txt +villebois, pg3200.txt +villein." pg3200.txt +villian's pg100.txt +villians pg31100.txt +villierses; pg3200.txt +villified pg3200.txt +vin?" pg3200.txt +vincentio pg100.txt +vincentio, pg100.txt +vincentio. pg100.txt +vincentio; pg100.txt +vincentio? pg100.txt +vinchy" pg3200.txt +vinci pg3200.txt +vinci's. pg3200.txt +vindicating pg31100.txt +vindication pg31100.txt +vindictive, pg3200.txt +vindictively pg3200.txt +vindictively-- pg3200.txt +vine pg100.txt +vine! pg100.txt +vine, pg100.txt +vine." pg3200.txt +vinegar pg100.txt +vines pg3200.txt +vines, pg3200.txt, pg100.txt +vines. pg3200.txt +vineyard, pg3200.txt +vineyard. pg3200.txt +vineyard; pg100.txt +vintage, pg3200.txt +vintner. pg100.txt +vintner.] pg100.txt +viol-de-gamboys, pg100.txt +viola pg100.txt +viola!' pg100.txt +violate pg3200.txt, pg100.txt +violate!- pg100.txt +violated pg3200.txt +violated." pg31100.txt +violates pg31100.txt, pg3200.txt, pg100.txt +violation pg100.txt +violation? pg100.txt +violence pg31100.txt, pg3200.txt, pg100.txt +violence! pg100.txt +violence, pg31100.txt, pg3200.txt +violence- pg100.txt +violence. pg31100.txt, pg100.txt +violence; pg3200.txt, pg100.txt +violent pg31100.txt, pg3200.txt, pg100.txt +violent--violent pg3200.txt +violent. pg31100.txt, pg3200.txt +violent." pg31100.txt +violently pg31100.txt, pg3200.txt +violently; pg3200.txt +violet, pg100.txt +violets pg100.txt +violets, pg100.txt +violets. pg3200.txt +violin, pg3200.txt +violins pg31100.txt +viper pg100.txt +virgil, pg3200.txt +virgilia pg100.txt +virgin pg3200.txt +virgin!" pg3200.txt +virgin's pg3200.txt +virgin, pg100.txt +virgin-knot pg100.txt +virgin-violator, pg100.txt +virgin. pg3200.txt, pg100.txt +virgin; pg100.txt +virginal pg100.txt +virginalling pg100.txt +virginia pg3200.txt +virginia, pg3200.txt +virginia--a pg3200.txt +virginia--it pg3200.txt +virginia--results pg3200.txt +virginia--who pg3200.txt +virginia. pg3200.txt +virginia; pg3200.txt +virginian pg3200.txt +virginity pg100.txt +virginity, pg100.txt +virginity-- pg100.txt +virginity. pg100.txt +virginity? 
pg100.txt +virginius pg100.txt +virgins pg3200.txt +virgins, pg3200.txt +virile, pg3200.txt +virtue pg31100.txt, pg3200.txt, pg100.txt +virtue! pg100.txt +virtue, pg31100.txt, pg3200.txt, pg100.txt +virtue- pg100.txt +virtue. pg3200.txt, pg100.txt +virtue." pg31100.txt +virtue..... pg3200.txt +virtue; pg100.txt +virtue? pg3200.txt +virtues pg31100.txt, pg3200.txt, pg100.txt +virtues, pg3200.txt, pg100.txt +virtues--only pg3200.txt +virtues. pg31100.txt, pg3200.txt, pg100.txt +virtues." pg31100.txt +virtues.' pg100.txt +virtues; pg100.txt +virtuous pg3200.txt, pg100.txt +virtuous! pg100.txt +virtuous, pg100.txt +virtuous- pg100.txt +virtuous. pg100.txt +virtuous? pg100.txt +virus, pg100.txt +visa, pg3200.txt +visage pg3200.txt, pg100.txt +visage! pg100.txt +visage, pg100.txt +visage. pg100.txt +visage? pg100.txt +visages pg100.txt +viscera pg3200.txt +viscera, pg3200.txt +viscount pg3200.txt +viscount, pg31100.txt +viscount-berkeley, pg3200.txt +viscounts; pg3200.txt +vishnu pg3200.txt +visible pg31100.txt, pg3200.txt +visible, pg3200.txt +visible. pg31100.txt, pg3200.txt +visible." pg3200.txt +visible: pg31100.txt +visible; pg3200.txt +visible? pg3200.txt +visibly pg31100.txt +vision pg3200.txt, pg100.txt +vision! pg3200.txt +vision!" pg3200.txt +vision, pg3200.txt, pg100.txt +vision- pg100.txt +vision. pg3200.txt, pg100.txt +vision; pg100.txt +visionary pg3200.txt +visionary. pg3200.txt +visioning pg3200.txt +visions pg31100.txt, pg3200.txt +visions, pg3200.txt +visions. pg3200.txt +visions; pg100.txt +visit pg31100.txt, pg3200.txt, pg100.txt +visit! pg3200.txt +visit!--ah, pg3200.txt +visit, pg31100.txt, pg3200.txt, pg100.txt +visit. pg31100.txt, pg3200.txt +visit." pg31100.txt +visit; pg31100.txt +visit? pg3200.txt +visit?" pg31100.txt, pg3200.txt +visitation pg3200.txt, pg100.txt +visitation, pg100.txt +visitation- pg100.txt +visitation. pg100.txt +visitation; pg100.txt +visitations pg100.txt +visited pg31100.txt, pg3200.txt +visited, pg3200.txt +visited--the pg3200.txt +visited. pg3200.txt, pg100.txt +visited; pg31100.txt, pg100.txt +visiting pg3200.txt +visiting-card. pg3200.txt +visiting. pg3200.txt +visitor pg31100.txt, pg3200.txt +visitor, pg31100.txt +visitor. pg31100.txt, pg3200.txt +visitor: pg3200.txt +visitors pg31100.txt, pg3200.txt +visitors' pg3200.txt +visitors, pg31100.txt, pg3200.txt +visitors--"munchausenizing" pg3200.txt +visitors. pg31100.txt, pg3200.txt, pg100.txt +visitors." pg31100.txt +visitors? pg3200.txt +visits pg31100.txt, pg3200.txt, pg100.txt +visits, pg31100.txt, pg3200.txt +visits; pg31100.txt +visor pg100.txt +visor. pg100.txt +visp, pg3200.txt +vista pg3200.txt +vista, pg3200.txt +vistas pg3200.txt +vitae. pg100.txt +vital pg3200.txt, pg100.txt +vitality, pg3200.txt +vitals pg3200.txt +vitals. pg3200.txt +vitement. pg100.txt +vitruvio; pg100.txt +vittles, pg3200.txt +vittles. pg3200.txt +vittles." pg3200.txt +vittles?" pg3200.txt +vituperation. pg3200.txt +vitznau pg3200.txt +vitznau, pg3200.txt +vivacity pg3200.txt +vivacity, pg31100.txt, pg3200.txt +vivacity. pg31100.txt +vivacity: pg3200.txt +vive' pg3200.txt +viviani pg3200.txt +vivid pg3200.txt +vivid, pg3200.txt +vivid. pg3200.txt +vivid; pg3200.txt +vividly pg3200.txt +vividness, pg3200.txt +vivisection, pg3200.txt +viz., pg3200.txt +viz.: pg3200.txt +vizarded, pg100.txt +vizarded- pg100.txt +vizards pg100.txt +vizards. pg100.txt +vizier pg3200.txt +vlouting-stog. pg100.txt +vocabulary pg3200.txt +vocabulary! pg3200.txt +vocabulary, pg3200.txt +vocabulary. 
pg3200.txt
+vocabulary; pg3200.txt
+vocal pg3200.txt
+vocation pg3200.txt, pg100.txt
+vocation'; pg100.txt
+vocation, pg3200.txt
+vocation--had pg3200.txt
+vocation. pg100.txt
+vocatur pg100.txt
+vogue pg3200.txt
+vogue, pg3200.txt
+voice pg31100.txt, pg3200.txt, pg100.txt
+voice! pg3200.txt
+voice!" pg3200.txt
+voice)--nobody pg31100.txt
+voice, pg31100.txt, pg3200.txt, pg100.txt
[... several thousand further entries in the same one-entry-per-line "+token source-file list" form, continuing alphabetically through the remainder of the "v" and "w" tokens and drawing on pg100.txt, pg31100.txt, and pg3200.txt ...]
+work pg31100.txt, pg3200.txt, pg100.txt
+work! pg3200.txt, pg100.txt
+work!"
pg3200.txt +work, pg31100.txt, pg3200.txt, pg100.txt +work,--even pg3200.txt +work- pg100.txt +work--and pg3200.txt +work--the pg3200.txt +work-bench, pg3200.txt +work-day pg3200.txt +work-hours pg3200.txt +work-like pg3200.txt +work-parlor pg3200.txt +work. pg31100.txt, pg3200.txt, pg100.txt +work." pg31100.txt, pg3200.txt +work.' pg3200.txt +work.--so pg3200.txt +work.--we pg3200.txt +work.... pg3200.txt +work: pg3200.txt +work; pg31100.txt, pg3200.txt, pg100.txt +work? pg31100.txt, pg3200.txt, pg100.txt +work?" pg3200.txt +workbasket, pg31100.txt +worked pg31100.txt, pg3200.txt +worked, pg3200.txt +worked. pg3200.txt +worker. pg3200.txt +worketh pg3200.txt +working pg31100.txt, pg3200.txt +working, pg3200.txt, pg100.txt +working-day pg3200.txt +working-day; pg100.txt +working-days; pg100.txt +working-plans pg3200.txt +working. pg3200.txt, pg100.txt +workingman pg3200.txt +workingmen's pg3200.txt +workings pg31100.txt, pg3200.txt +workman's pg3200.txt +workman, pg100.txt +workman. pg100.txt +workmanship pg3200.txt +workmanship--9. pg3200.txt +workmen pg31100.txt, pg3200.txt +workmen, pg3200.txt +workmen. pg3200.txt, pg100.txt +works pg31100.txt, pg3200.txt, pg100.txt +works! pg100.txt +works, pg31100.txt, pg3200.txt, pg100.txt +works. pg31100.txt, pg3200.txt, pg100.txt +works." pg31100.txt, pg3200.txt +works.' pg3200.txt +works: pg3200.txt +works; pg3200.txt, pg100.txt +works? pg3200.txt +workshop. pg3200.txt +worl' pg3200.txt +world pg31100.txt, pg3200.txt, pg100.txt +world! pg31100.txt, pg3200.txt, pg100.txt +world!" pg31100.txt, pg3200.txt +world!--could pg31100.txt +world!--we pg3200.txt +world's pg3200.txt, pg100.txt +world, pg31100.txt, pg3200.txt, pg100.txt +world,) pg3200.txt +world- pg100.txt +world--" pg3200.txt +world--." pg3200.txt +world--all pg3200.txt +world--amid pg3200.txt +world--and pg3200.txt +world--another pg3200.txt +world--drop pg3200.txt +world--four pg3200.txt +world--i pg3200.txt +world--prithee pg3200.txt +world--that pg3200.txt +world--the pg3200.txt +world-celebrated--astonished--happy--vain. pg3200.txt +world-conflagration pg3200.txt +world-figure, pg3200.txt +world. pg31100.txt, pg3200.txt, pg100.txt +world." pg31100.txt, pg3200.txt +world.' pg31100.txt, pg3200.txt +world.) pg3200.txt +world.... pg3200.txt +world: pg31100.txt, pg3200.txt, pg100.txt +world; pg31100.txt, pg3200.txt, pg100.txt +world? pg3200.txt, pg100.txt +world?" pg31100.txt, pg3200.txt +world?--a pg3200.txt +worldly pg31100.txt, pg3200.txt +worldly. pg3200.txt +worlds pg31100.txt, pg3200.txt +worlds. pg3200.txt +worm pg3200.txt, pg100.txt +worm"; pg3200.txt +worm, pg3200.txt, pg100.txt +worm-eaten pg3200.txt +worm. pg3200.txt, pg100.txt +worms pg3200.txt, pg100.txt +worms! pg100.txt +worms, pg3200.txt, pg100.txt +worms. pg3200.txt, pg100.txt +wormwood! pg100.txt +worn pg31100.txt, pg3200.txt, pg100.txt +worn, pg3200.txt, pg100.txt +worn--but pg3200.txt +worn. pg100.txt +worried pg31100.txt, pg3200.txt +worried, pg3200.txt +worried--but pg3200.txt +worried. pg3200.txt +worries, pg3200.txt +worries. pg3200.txt +worrow pg3200.txt +worry pg31100.txt, pg3200.txt +worry, pg3200.txt +worry." pg3200.txt +worry? pg3200.txt +worrying pg3200.txt +worrying. pg3200.txt +worse pg31100.txt, pg3200.txt, pg100.txt +worse! pg31100.txt, pg100.txt +worse); pg31100.txt +worse, pg31100.txt, pg3200.txt, pg100.txt +worse," pg31100.txt +worse- pg100.txt +worse--' pg3200.txt +worse--and pg3200.txt +worse. pg31100.txt, pg3200.txt, pg100.txt +worse." 
pg31100.txt, pg3200.txt +worse; pg31100.txt, pg3200.txt, pg100.txt +worse?" pg3200.txt +worser pg3200.txt, pg100.txt +worship pg3200.txt, pg100.txt +worship! pg3200.txt, pg100.txt +worship!" pg3200.txt +worship's pg100.txt +worship, pg3200.txt, pg100.txt +worship--" pg3200.txt +worship--what pg3200.txt +worship. pg3200.txt, pg100.txt +worship." pg3200.txt +worship; pg3200.txt, pg100.txt +worship? pg3200.txt, pg100.txt +worshiped pg3200.txt +worshiped, pg3200.txt +worshiped--think pg3200.txt +worshiped. pg3200.txt +worshipful pg3200.txt, pg100.txt +worshiping pg3200.txt +worshiping. pg3200.txt +worshipingly: pg3200.txt +worshipp'd pg100.txt +worshipped pg31100.txt, pg3200.txt +worshipped, pg3200.txt +worshipped. pg3200.txt +worshipper pg100.txt +worshippers? pg100.txt +worshipping pg3200.txt +worshipping, pg3200.txt +worshipping. pg3200.txt +worships pg100.txt +worst pg31100.txt, pg3200.txt, pg100.txt +worst! pg3200.txt, pg100.txt +worst'? pg100.txt +worst, pg3200.txt, pg100.txt +worst-beaten pg3200.txt +worst-extended pg100.txt +worst. pg31100.txt, pg3200.txt, pg100.txt +worst." pg31100.txt +worst.' pg100.txt +worst; pg3200.txt, pg100.txt +worst? pg100.txt +worth pg31100.txt, pg3200.txt, pg100.txt +worth! pg3200.txt, pg100.txt +worth" pg3200.txt +worth, pg31100.txt, pg3200.txt, pg100.txt +worth- pg100.txt +worth--remember pg3200.txt +worth--well, pg3200.txt +worth-while pg31100.txt +worth. pg31100.txt, pg3200.txt, pg100.txt +worth." pg31100.txt, pg3200.txt +worth: pg3200.txt +worth; pg100.txt +worth? pg100.txt +worth?" pg3200.txt +worthier pg3200.txt +worthier. pg3200.txt, pg100.txt +worthies pg3200.txt, pg100.txt +worthies! pg100.txt +worthies--the pg31100.txt +worthies. pg100.txt +worthies? pg100.txt +worthiest pg31100.txt, pg100.txt +worthiest, pg100.txt +worthily pg3200.txt, pg100.txt +worthiness pg100.txt +worthiness, pg100.txt +worthiness. pg100.txt +worthless pg3200.txt +worthless, pg31100.txt, pg3200.txt +worthless. pg3200.txt, pg100.txt +worthless; pg3200.txt +worthlessness pg31100.txt +worthy pg31100.txt, pg3200.txt, pg100.txt +worthy, pg3200.txt, pg100.txt +worthy. pg3200.txt, pg100.txt +worthy." pg31100.txt +worthy; pg100.txt +worts. pg100.txt +worts." pg3200.txt +wot pg100.txt +wot, pg100.txt +wot,' pg100.txt +wot. pg100.txt +wot; pg100.txt +wots pg100.txt +wou'dn't pg3200.txt +would! pg31100.txt +would!" pg31100.txt, pg3200.txt +would" pg100.txt +would'nt pg3200.txt +would've pg3200.txt +would, pg31100.txt, pg3200.txt, pg100.txt +would,' pg100.txt +would- pg100.txt +would--" pg3200.txt +would--for pg31100.txt +would--would pg3200.txt +would--yes. pg3200.txt +would-be pg3200.txt +would. pg31100.txt, pg3200.txt, pg100.txt +would." pg31100.txt, pg3200.txt +would; pg3200.txt +would? pg3200.txt, pg100.txt +would?" pg31100.txt, pg3200.txt +wouldn't pg3200.txt +wouldn't'a' pg3200.txt +wouldn't, pg3200.txt +wouldn't. pg3200.txt +wouldn't." pg3200.txt +wouldn't; pg3200.txt +wouldst pg3200.txt, pg100.txt +wouldst, pg100.txt +wouldst? pg100.txt +wound pg31100.txt, pg3200.txt, pg100.txt +wound! pg100.txt +wound, pg3200.txt, pg100.txt +wound--it pg3200.txt +wound. pg31100.txt, pg3200.txt, pg100.txt +wound; pg3200.txt, pg100.txt +wound? pg100.txt +wounded pg3200.txt, pg100.txt +wounded, pg3200.txt, pg100.txt +wounded--17. pg3200.txt +wounded. pg31100.txt, pg3200.txt, pg100.txt +wounded." pg3200.txt +wounded: pg3200.txt +wounded? pg100.txt +wounded?" 
pg3200.txt +woundily pg3200.txt +woundin' pg3200.txt +wounding pg31100.txt, pg3200.txt +wounding; pg100.txt +wounds pg31100.txt, pg3200.txt, pg100.txt +wounds! pg100.txt +wounds, pg3200.txt, pg100.txt +wounds. pg3200.txt, pg100.txt +wounds; pg3200.txt, pg100.txt +wove pg3200.txt +woven pg3200.txt, pg100.txt +wow?" pg3200.txt +wrack! pg100.txt +wrack) pg100.txt +wrack, pg100.txt +wrack." pg3200.txt +wrangle pg100.txt +wrangled pg3200.txt +wrangler pg100.txt +wrapped pg3200.txt +wrapper pg3200.txt +wrappers. pg3200.txt +wrapt pg31100.txt, pg100.txt +wrath pg3200.txt, pg100.txt +wrath! pg100.txt +wrath, pg100.txt +wrath-- pg3200.txt +wrath. pg100.txt +wrath; pg100.txt +wrathfully; pg100.txt +wreak pg3200.txt +wreaks, pg100.txt +wreath pg3200.txt +wreaths pg3200.txt +wreaths; pg100.txt +wreck pg3200.txt +wreck'd, pg100.txt +wreck'd. pg100.txt +wreck, pg3200.txt, pg100.txt +wreck- pg100.txt +wreck. pg3200.txt, pg100.txt +wreck." pg3200.txt +wreck; pg100.txt +wreck?" pg3200.txt +wreckage pg3200.txt +wrecked pg3200.txt +wrecking pg3200.txt +wrecks pg3200.txt +wrecks, pg100.txt +wrecks; pg3200.txt +wren, pg100.txt +wren. pg100.txt +wrench. pg3200.txt +wrenched pg3200.txt +wrenching. pg100.txt +wrest pg3200.txt +wrested pg3200.txt +wrestle] pg100.txt +wrestled pg3200.txt +wrestled? pg100.txt +wrestler pg100.txt +wrestler? pg100.txt +wrestling. pg100.txt +wrestling? pg100.txt +wretch pg31100.txt, pg3200.txt, pg100.txt +wretch! pg100.txt +wretch!" pg31100.txt +wretch, pg100.txt +wretch--it pg31100.txt +wretch. pg100.txt +wretch." pg31100.txt +wretched pg31100.txt, pg3200.txt, pg100.txt +wretched!" pg31100.txt +wretched, pg3200.txt, pg100.txt +wretched- pg3200.txt +wretched--and pg31100.txt +wretched. pg31100.txt, pg100.txt +wretcheder pg3200.txt +wretchedest pg3200.txt +wretchedly pg31100.txt +wretchedness pg31100.txt, pg3200.txt, pg100.txt +wretchedness, pg31100.txt, pg3200.txt +wretchedness. pg31100.txt, pg3200.txt +wretches pg100.txt +wretches!" pg31100.txt +wright pg31100.txt +wright, pg3200.txt +wring pg100.txt +wringer. pg100.txt +wringing pg100.txt +wringled pg3200.txt +wrinkle. pg3200.txt +wrinkled pg3200.txt +wrinkled, pg3200.txt +wrinkles! pg100.txt +wrinkles, pg31100.txt +wrinkles. pg3200.txt +wrist pg3200.txt +wrist, pg3200.txt, pg100.txt +wristbands; pg3200.txt +wrists pg3200.txt +writ pg3200.txt, pg100.txt +writ'n pg3200.txt +writ, pg100.txt +writ. pg100.txt +writ." pg31100.txt +writ: pg3200.txt, pg100.txt +writ:' pg3200.txt +writ; pg100.txt +writ? pg100.txt +write pg31100.txt, pg3200.txt, pg100.txt +write! pg100.txt +write, pg31100.txt, pg3200.txt, pg100.txt +write. pg31100.txt, pg3200.txt, pg100.txt +write." pg3200.txt +write; pg3200.txt +write;" pg31100.txt +write? pg3200.txt +write?" pg3200.txt +writer pg31100.txt, pg3200.txt, pg100.txt +writer's pg3200.txt +writer, pg3200.txt +writer. pg31100.txt, pg3200.txt +writer." pg3200.txt +writers pg3200.txt +writes pg31100.txt, pg3200.txt, pg100.txt +writes!" pg31100.txt +writes, pg3200.txt +writes. pg3200.txt, pg100.txt +writes." pg31100.txt +writes; pg100.txt +writes] pg100.txt +writhed pg3200.txt +writhing pg3200.txt +writin' pg3200.txt +writing pg31100.txt, pg3200.txt, pg100.txt +writing, pg31100.txt, pg3200.txt, pg100.txt +writing--the pg3200.txt +writing--this. pg3200.txt +writing-desk pg31100.txt +writing-desk, pg31100.txt +writing-machines pg3200.txt +writing. pg31100.txt, pg3200.txt, pg100.txt +writing." pg31100.txt +writing; pg31100.txt +writing?" pg3200.txt +writings pg31100.txt, pg3200.txt +writings. pg3200.txt +writings." 
pg3200.txt +writings? pg3200.txt +written pg31100.txt, pg3200.txt, pg100.txt +written"--in pg3200.txt +written, pg31100.txt, pg3200.txt, pg100.txt +written. pg3200.txt +written." pg31100.txt, pg3200.txt +written.' pg3200.txt +wrong pg31100.txt, pg3200.txt, pg100.txt +wrong! pg100.txt +wrong'd pg100.txt +wrong'd, pg100.txt +wrong'd. pg100.txt +wrong'd; pg100.txt +wrong, pg31100.txt, pg3200.txt, pg100.txt +wrong- pg3200.txt, pg100.txt +wrong--but--" pg3200.txt +wrong-and pg3200.txt +wrong-doing. pg3200.txt +wrong. pg31100.txt, pg3200.txt, pg100.txt +wrong." pg31100.txt, pg3200.txt +wrong: pg3200.txt, pg100.txt +wrong; pg31100.txt, pg3200.txt, pg100.txt +wrong? pg3200.txt, pg100.txt +wrong?" pg31100.txt, pg3200.txt +wronged pg3200.txt, pg100.txt +wronged, pg3200.txt +wronged. pg100.txt +wronged; pg3200.txt +wronger pg100.txt +wronger; pg100.txt +wrongfully, pg100.txt +wrongfully. pg100.txt +wrongfully? pg100.txt +wronging pg3200.txt, pg100.txt +wrongs pg3200.txt, pg100.txt +wrongs! pg100.txt +wrongs, pg100.txt +wrongs. pg3200.txt, pg100.txt +wrongs: pg100.txt +wrongs; pg100.txt +wrongs? pg100.txt +wronk: pg100.txt +wrote pg31100.txt, pg3200.txt, pg100.txt +wrote, pg31100.txt, pg3200.txt +wrote. pg31100.txt, pg3200.txt +wrote." pg3200.txt +wrote: pg3200.txt +wrote; pg3200.txt +wrote? pg100.txt +wrought pg3200.txt, pg100.txt +wrought! pg3200.txt +wrought, pg100.txt +wrought. pg100.txt +wrought." pg3200.txt +wrought: pg100.txt +wrought? pg3200.txt +wrung pg31100.txt, pg3200.txt +wunst." pg3200.txt +wuth pg3200.txt +wuz pg3200.txt +wuz." pg3200.txt +www.gutenberg.net pg3200.txt +www.gutenberg.org pg31100.txt, pg100.txt +wyalong pg3200.txt +wye pg100.txt +x pg31100.txt, pg3200.txt +x. pg3200.txt, pg100.txt +x; pg3200.txt +xerxes pg3200.txt +xi pg31100.txt, pg3200.txt +xi. pg3200.txt, pg100.txt +xii pg31100.txt, pg3200.txt +xii. pg3200.txt, pg100.txt +xii., pg3200.txt +xiii pg31100.txt, pg3200.txt +xiii. pg3200.txt, pg100.txt +xiv pg31100.txt, pg3200.txt +xiv. pg3200.txt, pg100.txt +xix pg31100.txt, pg3200.txt +xix. pg3200.txt +xl pg31100.txt, pg3200.txt +xl. pg3200.txt +xl.] pg3200.txt +xli pg31100.txt, pg3200.txt +xli. pg3200.txt +xlii pg31100.txt, pg3200.txt +xlii. pg3200.txt +xliii pg31100.txt, pg3200.txt +xliii. pg3200.txt +xliv pg31100.txt, pg3200.txt +xliv. pg3200.txt +xlix pg31100.txt, pg3200.txt +xlix. pg3200.txt +xlv pg31100.txt, pg3200.txt +xlv. pg3200.txt +xlvi pg31100.txt, pg3200.txt +xlvi, pg3200.txt +xlvi. pg3200.txt +xlvii pg31100.txt, pg3200.txt +xlvii. pg3200.txt +xlviii pg31100.txt, pg3200.txt +xlviii. pg3200.txt +xv pg31100.txt, pg3200.txt +xv. pg3200.txt, pg100.txt +xvi pg31100.txt, pg3200.txt +xvi. pg3200.txt +xvii pg31100.txt, pg3200.txt +xvii. pg3200.txt +xvii." pg3200.txt +xviii pg31100.txt, pg3200.txt +xviii. pg3200.txt +xx pg31100.txt, pg3200.txt +xx. pg3200.txt +xxi pg31100.txt, pg3200.txt +xxi. pg3200.txt +xxii pg31100.txt, pg3200.txt +xxii. pg3200.txt +xxiii pg31100.txt, pg3200.txt +xxiii. pg3200.txt +xxiv pg31100.txt, pg3200.txt +xxiv. pg3200.txt +xxix pg31100.txt, pg3200.txt +xxix. pg31100.txt, pg3200.txt +xxix: pg3200.txt +xxv pg31100.txt, pg3200.txt +xxv. pg3200.txt +xxvi pg31100.txt, pg3200.txt +xxvi. pg3200.txt +xxvii pg31100.txt, pg3200.txt +xxvii. pg3200.txt +xxviii pg31100.txt, pg3200.txt +xxviii. pg3200.txt +xxvil pg3200.txt +xxx pg31100.txt, pg3200.txt +xxx. pg3200.txt +xxxi pg31100.txt, pg3200.txt +xxxi. pg3200.txt +xxxii pg31100.txt, pg3200.txt +xxxii. pg3200.txt +xxxii.4. pg3200.txt +xxxiii pg31100.txt, pg3200.txt +xxxiii. 
pg3200.txt +xxxiv pg31100.txt, pg3200.txt +xxxiv. pg3200.txt +xxxix pg31100.txt, pg3200.txt +xxxix. pg3200.txt +xxxv pg31100.txt, pg3200.txt +xxxv. pg3200.txt +xxxvi pg31100.txt, pg3200.txt +xxxvi. pg3200.txt +xxxvii pg31100.txt, pg3200.txt +xxxvii. pg3200.txt +xxxviii pg31100.txt, pg3200.txt +xxxviii. pg3200.txt +xylaloes, pg3200.txt +y' pg3200.txt +y'r pg3200.txt +y, pg3200.txt +y---- pg3200.txt +y. pg3200.txt +y." pg3200.txt +y., pg3200.txt +y..... pg3200.txt +y.: pg3200.txt +yacht pg3200.txt +yachting pg3200.txt +yachts. pg3200.txt +yackamoorundie pg3200.txt +yahoo, pg3200.txt +yale pg3200.txt +yale. pg3200.txt +yale; pg3200.txt +yaller pg3200.txt +yaller-fever, pg3200.txt +yalta pg3200.txt +yalta. pg3200.txt +yander pg3200.txt +yang-tse-kiang, pg3200.txt +yank pg3200.txt +yank!' pg3200.txt +yankalilla pg3200.txt +yanked pg3200.txt +yankee pg3200.txt +yankee, pg3200.txt +yankee. pg3200.txt +yankees pg3200.txt +yanking pg3200.txt +yanks pg3200.txt +yaranyacka pg3200.txt +yaranyackah; pg3200.txt +yard pg31100.txt, pg3200.txt, pg100.txt +yard, pg31100.txt, pg3200.txt +yard-arm! pg3200.txt +yard-stick pg3200.txt +yard. pg3200.txt, pg100.txt +yard." pg31100.txt +yard; pg3200.txt +yards pg31100.txt, pg3200.txt +yards, pg3200.txt +yards--kind pg3200.txt +yards. pg3200.txt +yards; pg3200.txt +yards? pg3200.txt +yardstick. pg3200.txt +yare pg100.txt +yare! pg3200.txt +yarely, pg100.txt +yarmouth?" pg31100.txt +yarn pg3200.txt, pg100.txt +yarn, pg100.txt +yarns pg3200.txt +yas!" pg3200.txt +yatala pg3200.txt +yates pg31100.txt, pg3200.txt +yates's pg31100.txt +yates, pg31100.txt +yates." pg31100.txt +yates.'" pg3200.txt +yawing pg3200.txt +yawl pg3200.txt +yawl-boat pg3200.txt +yawn pg3200.txt +yawn. pg31100.txt +ye pg3200.txt, pg100.txt +ye! pg100.txt +ye!" pg3200.txt +ye!' pg3200.txt +ye'll pg3200.txt +ye). pg3200.txt +ye, pg3200.txt, pg100.txt +ye," pg3200.txt +ye- pg100.txt +ye--" pg3200.txt +ye. pg100.txt +ye: pg100.txt +ye; pg3200.txt, pg100.txt +ye? pg3200.txt, pg100.txt +ye?" pg3200.txt +yea pg100.txt +yea, pg3200.txt, pg100.txt +yea. pg100.txt +year pg31100.txt, pg3200.txt, pg100.txt +year! pg31100.txt, pg3200.txt, pg100.txt +year!" pg3200.txt +year's pg3200.txt +year, pg31100.txt, pg3200.txt, pg100.txt +year," pg3200.txt +year,) pg3200.txt +year--an pg3200.txt +year--and pg3200.txt +year--but pg3200.txt +year--they pg3200.txt +year. pg31100.txt, pg3200.txt, pg100.txt +year." pg31100.txt, pg3200.txt +year.' pg3200.txt +year.) pg3200.txt +year..... pg3200.txt +year: pg3200.txt +year; pg31100.txt, pg3200.txt, pg100.txt +year? pg3200.txt, pg100.txt +year?" pg31100.txt, pg3200.txt +year?' pg3200.txt +yearly pg31100.txt, pg3200.txt +yearly, pg3200.txt +yearn pg3200.txt, pg100.txt +yearned pg3200.txt +yearning pg31100.txt, pg3200.txt +yearning, pg3200.txt +yearningly pg3200.txt +years pg31100.txt, pg3200.txt, pg100.txt +years! pg31100.txt, pg3200.txt +years!" pg31100.txt, pg3200.txt +years!' pg3200.txt +years' pg31100.txt, pg3200.txt, pg100.txt +years, pg31100.txt, pg3200.txt, pg100.txt +years," pg31100.txt, pg3200.txt +years- pg100.txt +years--all pg3200.txt +years--and pg3200.txt +years--dreamy, pg3200.txt +years--from pg3200.txt +years--how pg31100.txt +years--i pg3200.txt +years--the pg3200.txt +years--they pg3200.txt +years--when pg3200.txt +years--yes, pg3200.txt +years. pg31100.txt, pg3200.txt, pg100.txt +years." pg31100.txt, pg3200.txt +years.' pg3200.txt +years; pg3200.txt, pg100.txt +years? pg3200.txt, pg100.txt +years?" pg31100.txt +years?' 
pg3200.txt +yeas, pg100.txt +yell pg100.txt +yell'd pg100.txt +yell, pg3200.txt +yell. pg3200.txt +yell: pg3200.txt +yelled pg3200.txt +yelling pg3200.txt +yelling, pg3200.txt +yelling: pg3200.txt +yellocution; pg3200.txt +yellow pg31100.txt, pg3200.txt, pg100.txt +yellow, pg3200.txt, pg100.txt +yellow-faced pg3200.txt +yellow-jacket pg3200.txt +yellow. pg3200.txt, pg100.txt +yellow; pg100.txt +yellowish, pg3200.txt +yellowness; pg100.txt +yellowstone; pg3200.txt +yells pg3200.txt +yells, pg3200.txt +yells: pg3200.txt +yelp pg3200.txt +yelp, pg3200.txt +yelped pg3200.txt +yenisei, pg3200.txt +yeoman. pg100.txt +yeomanry pg31100.txt +yeomanry. pg3200.txt +yeomen! pg100.txt +yeomen, pg100.txt +yere. pg3200.txt +yeres pg3200.txt +yeres. pg3200.txt +yerself?" pg3200.txt +yes pg31100.txt, pg3200.txt +yes! pg3200.txt +yes!" pg3200.txt +yes, pg31100.txt, pg3200.txt, pg100.txt +yes--all pg3200.txt +yes--anything. pg3200.txt +yes--many." pg3200.txt +yes--the pg3200.txt +yes--yes." pg3200.txt +yes--you pg3200.txt +yes-yes. pg3200.txt +yes. pg3200.txt, pg100.txt +yes." pg3200.txt +yes: pg100.txt +yes; pg31100.txt, pg3200.txt +yessoo pg3200.txt +yessoo, pg3200.txt +yesterday pg31100.txt, pg3200.txt, pg100.txt +yesterday's pg3200.txt +yesterday, pg31100.txt, pg3200.txt, pg100.txt +yesterday--and pg3200.txt +yesterday--but pg3200.txt +yesterday--or pg3200.txt +yesterday--they pg3200.txt +yesterday. pg31100.txt, pg3200.txt, pg100.txt +yesterday." pg31100.txt, pg3200.txt +yesterday.' pg3200.txt +yesterday; pg31100.txt, pg3200.txt +yesterday? pg3200.txt +yesterday?" pg31100.txt, pg3200.txt +yesterdays pg3200.txt +yestermorn; pg31100.txt +yesternight pg3200.txt, pg100.txt +yesternight, pg100.txt +yesternight. pg100.txt +yet pg31100.txt, pg3200.txt, pg100.txt +yet! pg3200.txt, pg100.txt +yet!! pg3200.txt +yet!!! pg3200.txt +yet!" pg3200.txt +yet!' pg3200.txt +yet'! pg100.txt +yet, pg31100.txt, pg3200.txt, pg100.txt +yet," pg3200.txt +yet,) pg3200.txt +yet- pg100.txt +yet--" pg3200.txt +yet--and pg3200.txt +yet--at pg3200.txt +yet--but pg31100.txt +yet--he pg31100.txt +yet--if pg3200.txt +yet--maybe pg3200.txt +yet--that pg3200.txt +yet--they're pg3200.txt +yet--wait." pg3200.txt +yet. pg31100.txt, pg3200.txt, pg100.txt +yet." pg31100.txt, pg3200.txt +yet.' pg3200.txt, pg100.txt +yet.') pg3200.txt +yet.* pg3200.txt +yet.--m.t. pg3200.txt +yet: pg3200.txt, pg100.txt +yet; pg31100.txt, pg3200.txt, pg100.txt +yet? pg31100.txt, pg3200.txt, pg100.txt +yet?" pg3200.txt +yet?' pg3200.txt +yett pg3200.txt +yew pg100.txt +yew, pg100.txt +yez. pg3200.txt +yield pg3200.txt, pg100.txt +yield!' pg3200.txt +yield, pg3200.txt, pg100.txt +yield. pg31100.txt, pg3200.txt, pg100.txt +yield." pg3200.txt +yield; pg100.txt +yield? pg100.txt +yielded pg31100.txt, pg3200.txt, pg100.txt +yielded, pg31100.txt, pg3200.txt, pg100.txt +yielded. pg100.txt +yielded; pg100.txt +yielding pg31100.txt, pg3200.txt +yielding, pg31100.txt, pg3200.txt, pg100.txt +yielding. pg3200.txt, pg100.txt +yields pg3200.txt, pg100.txt +yis, pg3200.txt +yit pg3200.txt +yit. pg3200.txt +yit." pg3200.txt +yo' pg3200.txt +yo'self?' pg3200.txt +yohanniss pg3200.txt +yoke pg100.txt +yoke, pg100.txt +yoke-mates. pg3200.txt +yoke. pg100.txt +yoke." pg3200.txt +yoke.' pg100.txt +yoke; pg100.txt +yoke? pg100.txt +yoked pg100.txt +yokes pg100.txt +yond pg100.txt +yond, pg100.txt +yond. pg100.txt +yond? pg100.txt +yonder pg3200.txt, pg100.txt +yonder! pg3200.txt +yonder's pg3200.txt +yonder, pg3200.txt, pg100.txt +yonder--i pg3200.txt +yonder-up pg3200.txt +yonder. 
pg3200.txt, pg100.txt +yonder." pg3200.txt +yonder.' pg3200.txt +yonder; pg3200.txt, pg100.txt +yonder? pg3200.txt, pg100.txt +yonder?" pg3200.txt +yore. pg3200.txt, pg100.txt +yorick's pg100.txt +yorick. pg3200.txt +york pg3200.txt, pg100.txt +york! pg3200.txt, pg100.txt +york" pg3200.txt +york's pg100.txt +york)-- pg3200.txt +york, pg3200.txt, pg100.txt +york,.............2,500 pg3200.txt +york-- pg3200.txt +york--23; pg3200.txt +york--america--home. pg3200.txt +york--and pg3200.txt +york--i pg3200.txt +york. pg3200.txt, pg100.txt +york." pg3200.txt +york.' pg3200.txt +york.'" pg3200.txt +york: pg3200.txt, pg100.txt +york; pg3200.txt, pg100.txt +york? pg3200.txt, pg100.txt +york?" pg3200.txt +yorkers pg3200.txt +yorkists pg100.txt +yorkshire pg31100.txt, pg100.txt +yorkshire;--that pg31100.txt +yorkshire?" pg31100.txt +yortzburg, pg3200.txt +yortzburgh, pg3200.txt +you! pg31100.txt, pg3200.txt, pg100.txt +you!" pg31100.txt, pg3200.txt +you!' pg3200.txt +you!- pg100.txt +you!--th--" pg3200.txt +you" pg31100.txt, pg3200.txt +you"--speaking pg31100.txt +you'. pg100.txt +you'd pg3200.txt, pg100.txt +you'll pg3200.txt, pg100.txt +you'll--" pg3200.txt +you're pg3200.txt, pg100.txt +you's pg3200.txt +you't pg3200.txt +you've pg3200.txt +you, pg31100.txt, pg3200.txt, pg100.txt +you," pg31100.txt, pg3200.txt +you,' pg3200.txt +you- pg100.txt +you-- pg3200.txt, pg100.txt +you--" pg31100.txt, pg3200.txt +you----" pg3200.txt +you--all pg3200.txt +you--all. pg3200.txt +you--and pg31100.txt, pg3200.txt +you--annotated. pg3200.txt +you--any pg3200.txt +you--be pg31100.txt +you--but pg31100.txt +you--could pg3200.txt +you--cousins?" pg3200.txt +you--created. pg3200.txt +you--does pg3200.txt +you--for pg3200.txt +you--has pg3200.txt +you--i pg3200.txt +you--just pg31100.txt +you--oh, pg3200.txt +you--or pg3200.txt +you--she pg31100.txt +you--sorry pg31100.txt +you--that pg3200.txt +you--the pg3200.txt +you--up-stream--now pg3200.txt +you--viz: pg3200.txt +you--with pg3200.txt +you--you pg3200.txt +you-all. pg3200.txt +you-i pg100.txt +you-well, pg100.txt +you. pg31100.txt, pg3200.txt, pg100.txt +you." pg31100.txt, pg3200.txt +you."] pg3200.txt +you.' pg3200.txt, pg100.txt +you.--how--how pg31100.txt +you.--so pg31100.txt +you.] pg3200.txt +you._"] pg31100.txt +you: pg31100.txt, pg100.txt +you:" pg31100.txt +you; pg31100.txt, pg3200.txt, pg100.txt +you;--but pg31100.txt +you? pg31100.txt, pg3200.txt, pg100.txt +you?" pg31100.txt, pg3200.txt +you?"--and pg3200.txt +you?' pg3200.txt, pg100.txt +you?--boat pg3200.txt +you?--does pg3200.txt +you?--don't pg3200.txt +you?--oh, pg3200.txt +you?--promise pg3200.txt +young pg31100.txt, pg3200.txt, pg100.txt +young's pg3200.txt +young, pg31100.txt, pg3200.txt, pg100.txt +young- pg3200.txt +young-- pg3200.txt +young--he pg31100.txt +young-man- pg3200.txt +young. pg3200.txt, pg100.txt +young." pg31100.txt, pg3200.txt +young.] pg3200.txt +young; pg31100.txt, pg100.txt +young? pg100.txt +young?" pg31100.txt, pg3200.txt +younger pg31100.txt, pg3200.txt, pg100.txt +younger! pg3200.txt +younger. pg100.txt +youngest pg31100.txt, pg3200.txt, pg100.txt +youngest. pg3200.txt +youngster pg3200.txt +youngsters! pg3200.txt +youngsters, pg3200.txt +your- pg100.txt +your--" pg3200.txt +your. pg3200.txt +yourn. pg3200.txt +yourn? pg3200.txt +yourn?" pg3200.txt +yours pg31100.txt, pg3200.txt, pg100.txt +yours! pg31100.txt, pg3200.txt, pg100.txt +yours!" 
pg3200.txt +yours) pg3200.txt +yours, pg31100.txt, pg3200.txt, pg100.txt +yours- pg100.txt +yours-- pg100.txt +yours--" pg31100.txt +yours--' pg3200.txt +yours--and pg3200.txt +yours. pg31100.txt, pg3200.txt, pg100.txt +yours." pg31100.txt, pg3200.txt +yours.' pg100.txt +yours: pg100.txt +yours; pg31100.txt, pg3200.txt, pg100.txt +yours? pg3200.txt, pg100.txt +yours?" pg3200.txt +yourself pg31100.txt, pg3200.txt, pg100.txt +yourself! pg100.txt +yourself!" pg3200.txt +yourself, pg31100.txt, pg3200.txt, pg100.txt +yourself--" pg31100.txt, pg3200.txt +yourself--and pg3200.txt +yourself. pg31100.txt, pg3200.txt, pg100.txt +yourself." pg31100.txt, pg3200.txt +yourself.' pg3200.txt +yourself: pg100.txt +yourself; pg3200.txt, pg100.txt +yourself? pg31100.txt, pg3200.txt, pg100.txt +yourself?" pg31100.txt, pg3200.txt +yourself?--for pg3200.txt +yourselves pg31100.txt, pg3200.txt, pg100.txt +yourselves! pg100.txt +yourselves" pg3200.txt +yourselves, pg3200.txt, pg100.txt +yourselves. pg31100.txt, pg3200.txt, pg100.txt +yourselves." pg31100.txt, pg3200.txt +yourselves: pg100.txt +yourselves; pg100.txt +yourselves? pg31100.txt, pg3200.txt, pg100.txt +youth pg31100.txt, pg3200.txt, pg100.txt +youth! pg3200.txt, pg100.txt +youth, pg31100.txt, pg3200.txt, pg100.txt +youth- pg100.txt +youth--rescuing pg3200.txt +youth--the pg3200.txt +youth. pg31100.txt, pg3200.txt, pg100.txt +youth.' pg100.txt +youth; pg100.txt +youth? pg3200.txt, pg100.txt +youthful pg31100.txt, pg3200.txt +youthful, pg3200.txt +youths pg3200.txt +youths, pg3200.txt +yr pg3200.txt +yrs pg3200.txt +yrs, pg3200.txt +yrs. pg3200.txt +yrxwly, pg3200.txt +ys pg3200.txt +yt pg3200.txt +yuma pg3200.txt +yuther pg3200.txt +z pg3200.txt +z. pg3200.txt +z9o.5. pg3200.txt +zambesi. pg3200.txt +zampillaerostation--" pg3200.txt +zanies. pg100.txt +zany, pg100.txt +ze pg3200.txt +zeal pg31100.txt, pg3200.txt, pg100.txt +zeal, pg3200.txt, pg100.txt +zeal-- pg3200.txt +zeal. pg3200.txt, pg100.txt +zealand pg3200.txt +zealand, pg3200.txt +zealand. pg3200.txt +zealand: pg3200.txt +zealand; pg3200.txt +zealous pg31100.txt, pg3200.txt +zebra, pg3200.txt +zedekiah, pg3200.txt +zei pg3200.txt +zeit! pg3200.txt +zeitung pg3200.txt +zeitvertreib. pg3200.txt +zenana. pg3200.txt +zenelophon; pg100.txt +zenith pg3200.txt +zenith. pg3200.txt +zermatt pg3200.txt +zermatt--at pg3200.txt +zermatt. pg3200.txt +zero pg3200.txt +zero, pg3200.txt +zest pg3200.txt +zest; pg3200.txt +ziani, pg3200.txt +zimmermanns? pg3200.txt +zion, pg3200.txt +zither pg3200.txt +zither, pg3200.txt +zoe, pg3200.txt +zone, pg100.txt +zonoras pg3200.txt +zoroaster pg3200.txt +zoroastrians, pg3200.txt +zounds! pg100.txt +zu pg3200.txt +zug pg3200.txt +zug. pg3200.txt +zuge pg3200.txt +zuruckkommen pg3200.txt +zusammen pg3200.txt +zusammengetroffen, pg3200.txt +zutphen. 
pg3200.txt +zwar pg3200.txt +zwei pg3200.txt +zwinglian, pg3200.txt +zylobalsamum--" pg3200.txt +{13} pg3200.txt +{1} pg3200.txt +{3} pg3200.txt +{5} pg3200.txt +{6} pg3200.txt +{7} pg3200.txt +{8} pg3200.txt +| pg3200.txt +|__________________________________| pg3200.txt +|| pg3200.txt +||=======|==== pg3200.txt diff --git a/Assign1/src/Question1/WordCount.java b/Assign1/src/Question1/WordCount.java deleted file mode 100644 index 163d1a1f84d41fa1d3f4dd30049ce2f9ec905007..0000000000000000000000000000000000000000 --- a/Assign1/src/Question1/WordCount.java +++ /dev/null @@ -1,76 +0,0 @@ -package Question1; - -import java.io.IOException; -import java.util.Arrays; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; - -public class WordCount extends Configured implements Tool { - public static void main(String[] args) throws Exception { - System.out.println(Arrays.toString(args)); - int res = ToolRunner.run(new Configuration(), new WordCount(), args); - - System.exit(res); - } - - @Override - public int run(String[] args) throws Exception { - System.out.println(Arrays.toString(args)); - Job job = new Job(getConf(), "WordCount"); - job.setJarByClass(WordCount.class); - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(IntWritable.class); - - job.setMapperClass(Map.class); - job.setReducerClass(Reduce.class); - - job.setInputFormatClass(TextInputFormat.class); - job.setOutputFormatClass(TextOutputFormat.class); - - FileInputFormat.addInputPath(job, new Path(args[0])); - FileOutputFormat.setOutputPath(job, new Path(args[1])); - - job.waitForCompletion(true); - - return 0; - } - - public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> { - private final static IntWritable ONE = new IntWritable(1); - private Text word = new Text(); - - @Override - public void map(LongWritable key, Text value, Context context) - throws IOException, InterruptedException { - for (String token: value.toString().split("\\s+")) { - word.set(token); - context.write(word, ONE); - } - } - } - - public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> { - @Override - public void reduce(Text key, Iterable<IntWritable> values, Context context) - throws IOException, InterruptedException { - int sum = 0; - for (IntWritable val : values) { - sum += val.get(); - } - context.write(key, new IntWritable(sum)); - } - } -} diff --git a/Assign1/src/Question2/InvIndex.java b/Assign1/src/Question2/InvIndex.java new file mode 100644 index 0000000000000000000000000000000000000000..454154326f46c6f212b9d88860534e01d55ab7f9 --- /dev/null +++ b/Assign1/src/Question2/InvIndex.java @@ -0,0 +1,123 @@ +package Question2; + +import java.util.Arrays; +import java.util.StringTokenizer; + +import java.io.IOException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.fs.FileSystem; 
+import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.mapreduce.lib.input.FileSplit; +import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; +import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; +import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; +import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; + +public class InvIndex extends Configured implements Tool { + + public static void main(String[] args) throws Exception { + System.out.println(Arrays.toString(args)); + int res = ToolRunner.run(new Configuration(), new InvIndex(), args); + + System.exit(res); + } + + @Override + public int run(String[] args) throws Exception { + + Job job = Job.getInstance(getConf()); + job.setJobName("InvIndex"); + job.setJarByClass(InvIndex.class); + + job.setOutputKeyClass(Text.class); + job.setOutputValueClass(Text.class); + + job.setMapperClass(Map.class); + job.setReducerClass(Reduce.class); + + job.setInputFormatClass(TextInputFormat.class); + job.setOutputFormatClass(TextOutputFormat.class); + + Path outputFilePath = new Path(args[3]); + + FileInputFormat.addInputPath(job, new Path(args[0])); + FileInputFormat.addInputPath(job, new Path(args[1])); + FileInputFormat.addInputPath(job, new Path(args[2])); + FileOutputFormat.setOutputPath(job, outputFilePath); + + /* Delete the output path if it already exists */ + FileSystem fs = FileSystem.newInstance(getConf()); + + if (fs.exists(outputFilePath)) { + fs.delete(outputFilePath, true); + } + + return job.waitForCompletion(true) ?
0 : 1; + } + + public static class Map extends Mapper<LongWritable, Text, Text, Text> { + + private Text word = new Text(); + private Text filename = new Text(); + + private boolean caseSensitive = false; + + @Override + public void map(LongWritable key, Text value, Context context) + throws IOException, InterruptedException { + // Reuse the mutable Text field instead of allocating a new Text per record. + filename.set(((FileSplit) context.getInputSplit()).getPath().getName()); + + String line = value.toString(); + + if (!caseSensitive) { + line = line.toLowerCase(); + } + + StringTokenizer tokenizer = new StringTokenizer(line); + while (tokenizer.hasMoreTokens()) { + word.set(tokenizer.nextToken()); + context.write(word, filename); + } + } + + @Override + protected void setup(Context context) throws IOException, InterruptedException { + Configuration conf = context.getConfiguration(); + this.caseSensitive = conf.getBoolean("wordcount.case.sensitive", false); + } + } + + public static class Reduce extends Reducer<Text, Text, Text, Text> { + + @Override + public void reduce(final Text key, final Iterable<Text> values, + final Context context) throws IOException, InterruptedException { + + // Hadoop guarantees only a single pass over 'values'; calling + // values.iterator() inside the loop would return a fresh iterator + // that says nothing about this loop's position, so track the + // separator explicitly instead. + StringBuilder stringBuilder = new StringBuilder(); + String separator = ""; + + for (Text value : values) { + stringBuilder.append(separator); + stringBuilder.append(value.toString()); + separator = ", "; + } + + // Duplicate filenames are kept here; see InvertedIndex.Reduce below + // for a variant that deduplicates with a HashSet. + context.write(key, new Text(stringBuilder.toString())); + } + + } + +} diff --git a/Assign1/src/Question2/InvertedIndex.java b/Assign1/src/Question2/InvertedIndex.java new file mode 100644 index 0000000000000000000000000000000000000000..e67b780e6d08480c5c22fb09f4d22fd8a2863ea3 --- /dev/null +++ b/Assign1/src/Question2/InvertedIndex.java @@ -0,0 +1,115 @@ +package Question2; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.HashSet; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.mapreduce.lib.input.FileSplit; +import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; +import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; +import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; +import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; + +public class InvertedIndex extends Configured implements Tool { + public static void main(String[] args) throws Exception { + System.out.println(Arrays.toString(args)); + int res = ToolRunner.run(new Configuration(), new InvertedIndex(), args); + + System.exit(res); + } + + @Override + public int run(String[] args) throws Exception { + System.out.println(Arrays.toString(args)); + Job job = Job.getInstance(getConf(), "InvertedIndex"); + job.setJarByClass(InvertedIndex.class); + job.setOutputKeyClass(Text.class); + job.setOutputValueClass(Text.class); + + job.setMapperClass(Map.class); + job.setReducerClass(Reduce.class); + + job.setInputFormatClass(TextInputFormat.class); + job.setOutputFormatClass(TextOutputFormat.class); + + Path outputFilePath = new Path(args[3]); + + FileInputFormat.addInputPath(job, new Path(args[0])); +
FileInputFormat.addInputPath(job, new Path(args[1])); + FileInputFormat.addInputPath(job, new Path(args[2])); + FileOutputFormat.setOutputPath(job, outputFilePath); + + /* Delete the output path if it already exists */ + FileSystem fs = FileSystem.newInstance(getConf()); + + if (fs.exists(outputFilePath)) { + fs.delete(outputFilePath, true); + } + + return job.waitForCompletion(true) ? 0 : 1; + } + + public static class Map extends Mapper<LongWritable, Text, Text, Text> { + private Text word = new Text(); + private String stopwords_file = "/home/cloudera/workspace/bpa/Assign1/output_Q1.i/stopwords.csv"; + private HashSet<String> stopwords = new HashSet<String>(); + + @Override + protected void setup(Context context) throws IOException, InterruptedException { + // Load the stopword list once per mapper rather than once per input + // line; entries are assumed to be comma- or whitespace-separated. + String contents = new String(Files.readAllBytes( + Paths.get(stopwords_file))); + for (String stopword : contents.split("[,\\s]+")) { + stopwords.add(stopword.toLowerCase()); + } + } + + @Override + public void map(LongWritable key, Text value, Context context) + throws IOException, InterruptedException { + + Text filename = new Text( + ((FileSplit) context.getInputSplit()) + .getPath().getName()); + + for (String token : value.toString().split("\\s+")) { + // Look each token up in the stopword set (a substring test against + // the raw CSV would also discard words that merely contain a + // stopword) and emit every token that survives, not just the last + // one on the line. + if (!token.isEmpty() && !stopwords.contains(token.toLowerCase())) { + word.set(token.toLowerCase()); + context.write(word, filename); + } + } + } + } + + public static class Reduce extends Reducer<Text, Text, Text, Text> { + + @Override + public void reduce(Text key, Iterable<Text> values, Context context) + throws IOException, InterruptedException { + + // Collect distinct filenames so each document is listed once per word. + HashSet<String> set = new HashSet<String>(); + + for (Text value : values) { + set.add(value.toString()); + } + StringBuilder builder = new StringBuilder(); + + String prefix = ""; + for (String s : set) { + builder.append(prefix); + prefix = ", "; + builder.append(s); + } + + context.write(key, new Text(builder.toString())); + } + } +}
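+ +// Usage sketch, not part of the assignment spec: assuming the job is packaged +// as Assign1.jar (hypothetical name) and run against the three Gutenberg texts +// in the workspace, an invocation could look like +// hadoop jar Assign1.jar Question2.InvertedIndex pg100.txt pg31100.txt pg3200.txt output_Q2 +// with the three input files as args[0]..args[2] and the output directory as args[3].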