Java Code Examples for org.apache.hadoop.mapreduce.MapReduceTestUtil#createJob()

The following examples show how to use org.apache.hadoop.mapreduce.MapReduceTestUtil#createJob(). They are taken from open source Hadoop test code; the source file and project for each example are noted above it.
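As the examples below illustrate, two overloads of createJob appear here: one taking (conf, inDir, outDir, numMaps, numReds) and reading whatever is already under inDir, and one that additionally takes an input String and writes it to inDir before building the Job. The following is a minimal sketch of the common pattern, assuming a default local Configuration; the class name and the /tmp directory paths are illustrative placeholders, not part of the original examples.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;

public class CreateJobSketch {
  public static void main(String[] args) throws Exception {
    // Assumed setup: a plain Configuration, which defaults to the local
    // job runner and local file system, as these unit tests typically use.
    Configuration conf = new Configuration();

    // Hypothetical test directories; the real tests derive these from a
    // per-test root such as the "test.build.data" system property.
    Path inDir = new Path("/tmp/createjob-sketch/input");
    Path outDir = new Path("/tmp/createjob-sketch/output");

    // Overload used in most examples below: write the given input string
    // to inDir, then build a Job with 1 map task and 1 reduce task.
    Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, "a\nb\na\n");
    job.setJobName("createjob-sketch");

    // The returned Job is an ordinary Job: set mapper, reducer, and output
    // types as needed, then run it and check the outcome.
    boolean succeeded = job.waitForCompletion(true);
    System.out.println("job successful: " + succeeded);
  }
}
```

The tests then typically assert on job.isSuccessful() and compare MapReduceTestUtil.readOutput(outDir, conf) against an expected string, as shown in the examples that follow.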
Example 1
Source File: TestJobOutputCommitter.java    From hadoop with Apache License 2.0
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));

  // check that the files in the exclude set are not present
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example 2
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests one of the mappers throwing an exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example 3
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests the Reducer throwing an exception.
 * 
 * @throws Exception
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example 4
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests one of the maps consuming output.
 * 
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 5
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests reducer consuming output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 6
Source File: TestSingleElementChain.java    From hadoop with Apache License 2.0
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 7
Source File: TestSingleElementChain.java    From big-c with Apache License 2.0
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 8
Source File: TestJobOutputCommitter.java    From big-c with Apache License 2.0
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));

  // check that the files in the exclude set are not present
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example 9
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests one of the mappers throwing an exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example 10
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests the Reducer throwing an exception.
 * 
 * @throws Exception
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example 11
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests one of the maps consuming output.
 * 
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 12
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests reducer consuming output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 13
Source File: TestMRFieldSelection.java    From hadoop with Apache License 2.0
public static void launch() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  int numOfInputLines = 10;

  Path outDir = new Path(testDir, "output_for_field_selection_test");
  Path inDir = new Path(testDir, "input_for_field_selection_test");

  StringBuffer inputData = new StringBuffer();
  StringBuffer expectedOutput = new StringBuffer();
  constructInputOutputData(inputData, expectedOutput, numOfInputLines);
  
  conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
  conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
  conf.set(
    FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
    1, 1, inputData.toString());
  job.setMapperClass(FieldSelectionMapper.class);
  job.setReducerClass(FieldSelectionReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  job.setNumReduceTasks(1);

  job.waitForCompletion(true);
  assertTrue("Job Failed!", job.isSuccessful());

  // Finally, compare the job output with the expected output.
  String outdata = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesn't match.", expectedOutput.toString(), outdata);
  fs.delete(outDir, true);
}
 
Example 14
Source File: TestMRFieldSelection.java    From big-c with Apache License 2.0
public static void launch() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  int numOfInputLines = 10;

  Path outDir = new Path(testDir, "output_for_field_selection_test");
  Path inDir = new Path(testDir, "input_for_field_selection_test");

  StringBuffer inputData = new StringBuffer();
  StringBuffer expectedOutput = new StringBuffer();
  constructInputOutputData(inputData, expectedOutput, numOfInputLines);
  
  conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
  conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
  conf.set(
    FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
    1, 1, inputData.toString());
  job.setMapperClass(FieldSelectionMapper.class);
  job.setReducerClass(FieldSelectionReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  job.setNumReduceTasks(1);

  job.waitForCompletion(true);
  assertTrue("Job Failed!", job.isSuccessful());

  // Finally, compare the job output with the expected output.
  String outdata = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesn't match.", expectedOutput.toString(), outdata);
  fs.delete(outDir, true);
}
 
Example 15
Source File: TestMapReduceChain.java    From big-c with Apache License 2.0
public void testChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "1\n2\n";
  String expectedOutput = "0\t1ABCRDEF\n2\t2ABCRDEF\n";

  Configuration conf = createJobConf();
  cleanFlags(conf);
  conf.set("a", "X");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  Configuration mapAConf = new Configuration(false);
  mapAConf.set("a", "A");
  ChainMapper.addMapper(job, AMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapAConf);

  ChainMapper.addMapper(job, BMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, CMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  Configuration reduceConf = new Configuration(false);
  reduceConf.set("a", "C");
  ChainReducer.setReducer(job, RReduce.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, reduceConf);

  ChainReducer.addMapper(job, DMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  Configuration mapEConf = new Configuration(false);
  mapEConf.set("a", "E");
  ChainReducer.addMapper(job, EMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapEConf);

  ChainReducer.addMapper(job, FMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());

  String str = "flag not set";
  assertTrue(str, getFlag(conf, "map.setup.A"));
  assertTrue(str, getFlag(conf, "map.setup.B"));
  assertTrue(str, getFlag(conf, "map.setup.C"));
  assertTrue(str, getFlag(conf, "reduce.setup.R"));
  assertTrue(str, getFlag(conf, "map.setup.D"));
  assertTrue(str, getFlag(conf, "map.setup.E"));
  assertTrue(str, getFlag(conf, "map.setup.F"));

  assertTrue(str, getFlag(conf, "map.A.value.1"));
  assertTrue(str, getFlag(conf, "map.A.value.2"));
  assertTrue(str, getFlag(conf, "map.B.value.1A"));
  assertTrue(str, getFlag(conf, "map.B.value.2A"));
  assertTrue(str, getFlag(conf, "map.C.value.1AB"));
  assertTrue(str, getFlag(conf, "map.C.value.2AB"));
  assertTrue(str, getFlag(conf, "reduce.R.value.1ABC"));
  assertTrue(str, getFlag(conf, "reduce.R.value.2ABC"));
  assertTrue(str, getFlag(conf, "map.D.value.1ABCR"));
  assertTrue(str, getFlag(conf, "map.D.value.2ABCR"));
  assertTrue(str, getFlag(conf, "map.E.value.1ABCRD"));
  assertTrue(str, getFlag(conf, "map.E.value.2ABCRD"));
  assertTrue(str, getFlag(conf, "map.F.value.1ABCRDE"));
  assertTrue(str, getFlag(conf, "map.F.value.2ABCRDE"));

  assertTrue(getFlag(conf, "map.cleanup.A"));
  assertTrue(getFlag(conf, "map.cleanup.B"));
  assertTrue(getFlag(conf, "map.cleanup.C"));
  assertTrue(getFlag(conf, "reduce.cleanup.R"));
  assertTrue(getFlag(conf, "map.cleanup.D"));
  assertTrue(getFlag(conf, "map.cleanup.E"));
  assertTrue(getFlag(conf, "map.cleanup.F"));

  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example 16
Source File: TestMRKeyFieldBasedComparator.java    From hadoop with Apache License 2.0
private void testComparator(String keySpec, int expect) 
    throws Exception {
  String root = System.getProperty("test.build.data", "/tmp");
  Path inDir = new Path(root, "test_cmp/in");
  Path outDir = new Path(root, "test_cmp/out");
  
  conf.set("mapreduce.partition.keycomparator.options", keySpec);
  conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
  conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1,
              line1 + "\n" + line2 + "\n");
  job.setMapperClass(InverseMapper.class);
  job.setReducerClass(Reducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  job.setSortComparatorClass(KeyFieldBasedComparator.class);
  job.setPartitionerClass(KeyFieldBasedPartitioner.class);

  job.waitForCompletion(true);
  assertTrue(job.isSuccessful());

  // validate output
  Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(outDir,
      new Utils.OutputFileUtils.OutputFilesFilter()));
  if (outputFiles.length > 0) {
    InputStream is = getFileSystem().open(outputFiles[0]);
    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
    String line = reader.readLine();
    // make sure we get what we expect as the first line, and also
    // that we have two lines (both lines must end up in the same
    // reducer since the partitioner uses the same key spec for all
    // lines)
    if (expect == 1) {
      assertTrue(line.startsWith(line1));
    } else if (expect == 2) {
      assertTrue(line.startsWith(line2));
    }
    line = reader.readLine();
    if (expect == 1) {
      assertTrue(line.startsWith(line2));
    } else if (expect == 2) {
      assertTrue(line.startsWith(line1));
    }
    reader.close();
  }
}
 
Example 17
Source File: TestMRKeyFieldBasedComparator.java    From big-c with Apache License 2.0
private void testComparator(String keySpec, int expect) 
    throws Exception {
  String root = System.getProperty("test.build.data", "/tmp");
  Path inDir = new Path(root, "test_cmp/in");
  Path outDir = new Path(root, "test_cmp/out");
  
  conf.set("mapreduce.partition.keycomparator.options", keySpec);
  conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
  conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1,
              line1 + "\n" + line2 + "\n");
  job.setMapperClass(InverseMapper.class);
  job.setReducerClass(Reducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  job.setSortComparatorClass(KeyFieldBasedComparator.class);
  job.setPartitionerClass(KeyFieldBasedPartitioner.class);

  job.waitForCompletion(true);
  assertTrue(job.isSuccessful());

  // validate output
  Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(outDir,
      new Utils.OutputFileUtils.OutputFilesFilter()));
  if (outputFiles.length > 0) {
    InputStream is = getFileSystem().open(outputFiles[0]);
    BufferedReader reader = new BufferedReader(new InputStreamReader(is));
    String line = reader.readLine();
    // make sure we get what we expect as the first line, and also
    // that we have two lines (both lines must end up in the same
    // reducer since the partitioner uses the same key spec for all
    // lines)
    if (expect == 1) {
      assertTrue(line.startsWith(line1));
    } else if (expect == 2) {
      assertTrue(line.startsWith(line2));
    }
    line = reader.readLine();
    if (expect == 1) {
      assertTrue(line.startsWith(line2));
    } else if (expect == 2) {
      assertTrue(line.startsWith(line1));
    }
    reader.close();
  }
}
 
Example 18
Source File: TestMapReduceChain.java    From hadoop with Apache License 2.0
public void testChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "1\n2\n";
  String expectedOutput = "0\t1ABCRDEF\n2\t2ABCRDEF\n";

  Configuration conf = createJobConf();
  cleanFlags(conf);
  conf.set("a", "X");

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  Configuration mapAConf = new Configuration(false);
  mapAConf.set("a", "A");
  ChainMapper.addMapper(job, AMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapAConf);

  ChainMapper.addMapper(job, BMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, CMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  Configuration reduceConf = new Configuration(false);
  reduceConf.set("a", "C");
  ChainReducer.setReducer(job, RReduce.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, reduceConf);

  ChainReducer.addMapper(job, DMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  Configuration mapEConf = new Configuration(false);
  mapEConf.set("a", "E");
  ChainReducer.addMapper(job, EMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, mapEConf);

  ChainReducer.addMapper(job, FMap.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());

  String str = "flag not set";
  assertTrue(str, getFlag(conf, "map.setup.A"));
  assertTrue(str, getFlag(conf, "map.setup.B"));
  assertTrue(str, getFlag(conf, "map.setup.C"));
  assertTrue(str, getFlag(conf, "reduce.setup.R"));
  assertTrue(str, getFlag(conf, "map.setup.D"));
  assertTrue(str, getFlag(conf, "map.setup.E"));
  assertTrue(str, getFlag(conf, "map.setup.F"));

  assertTrue(str, getFlag(conf, "map.A.value.1"));
  assertTrue(str, getFlag(conf, "map.A.value.2"));
  assertTrue(str, getFlag(conf, "map.B.value.1A"));
  assertTrue(str, getFlag(conf, "map.B.value.2A"));
  assertTrue(str, getFlag(conf, "map.C.value.1AB"));
  assertTrue(str, getFlag(conf, "map.C.value.2AB"));
  assertTrue(str, getFlag(conf, "reduce.R.value.1ABC"));
  assertTrue(str, getFlag(conf, "reduce.R.value.2ABC"));
  assertTrue(str, getFlag(conf, "map.D.value.1ABCR"));
  assertTrue(str, getFlag(conf, "map.D.value.2ABCR"));
  assertTrue(str, getFlag(conf, "map.E.value.1ABCRD"));
  assertTrue(str, getFlag(conf, "map.E.value.2ABCRD"));
  assertTrue(str, getFlag(conf, "map.F.value.1ABCRDE"));
  assertTrue(str, getFlag(conf, "map.F.value.2ABCRDE"));

  assertTrue(getFlag(conf, "map.cleanup.A"));
  assertTrue(getFlag(conf, "map.cleanup.B"));
  assertTrue(getFlag(conf, "map.cleanup.C"));
  assertTrue(getFlag(conf, "reduce.cleanup.R"));
  assertTrue(getFlag(conf, "map.cleanup.D"));
  assertTrue(getFlag(conf, "map.cleanup.E"));
  assertTrue(getFlag(conf, "map.cleanup.F"));

  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}