org.apache.hadoop.mapreduce.MapReduceTestUtil Java Examples

The following examples show how to use org.apache.hadoop.mapreduce.MapReduceTestUtil. Each example is drawn from an open-source project; the originating source file and project are noted above each example.
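As a quick orientation before the examples, here is a minimal sketch of the pattern most of them follow: build a small job over inline input with MapReduceTestUtil.createJob, run it, and read the result back with MapReduceTestUtil.readOutput. The method name, paths, and input string below are placeholders, not taken from any project; the usual test imports (Configuration, Path, Job, MapReduceTestUtil, JUnit assertions) are assumed.

// A minimal sketch, assuming placeholder paths and standard test imports.
public void testMinimalRoundTrip() throws Exception {
  Configuration conf = new Configuration();
  Path inDir = new Path("/tmp/mrtestutil/input");   // placeholder
  Path outDir = new Path("/tmp/mrtestutil/output"); // placeholder

  // createJob materializes the inline input under inDir and configures
  // a job with the requested numbers of map and reduce tasks (1 and 1).
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, "a\nb\na\n");
  assertTrue("Job failed", job.waitForCompletion(true));

  // readOutput concatenates the job's output files into a single String,
  // which the tests below typically compare against an expected string.
  String output = MapReduceTestUtil.readOutput(outDir, conf);
}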
Example #1
Source File: TestMapReduceJobControl.java    From hadoop with Apache License 2.0
@Test(timeout = 30000)
public void testControlledJob() throws Exception {
  LOG.info("Starting testControlledJob");

  Configuration conf = createJobConf();
  cleanupData(conf);
  Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
  JobControl theControl = createDependencies(conf, job1);
  while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      break;
    }
  }
  Assert.assertNotNull(cjob1.getMapredJobId());

  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  assertEquals("Some jobs failed", 0, theControl.getFailedJobList().size());
  theControl.stop();
}
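createDependencies and the cjob fields are fixtures defined elsewhere in TestMapReduceJobControl. For readers new to the jobcontrol API, here is a hedged sketch of what such a fixture plausibly looks like; the dependency layout and names (outdir_2, "dependency-test") are assumptions, and only the ControlledJob/JobControl calls are real API.

// Illustrative sketch only; the real createDependencies lives in the test.
private JobControl createDependencies(Configuration conf, Job job1)
    throws Exception {
  // A second copy job that consumes the output of the first.
  Job job2 = MapReduceTestUtil.createCopyJob(conf, outdir_2, outdir_1);

  cjob1 = new ControlledJob(job1, null);                 // no prerequisites
  cjob2 = new ControlledJob(job2, Arrays.asList(cjob1)); // waits for cjob1

  JobControl theControl = new JobControl("dependency-test");
  theControl.addJob(cjob1);
  theControl.addJob(cjob2);

  // JobControl implements Runnable and submits jobs as their
  // dependencies complete, so it is driven from its own thread.
  new Thread(theControl).start();
  return theControl;
}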
 
Example #2
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests one of the maps consuming output.
 * 
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
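ConsumeMap is a fixture defined elsewhere in TestChainErrors; the test only relies on it swallowing every record, which is why expectedOutput is the empty string. A plausible minimal shape, shown as an assumption rather than the actual source:

// Assumed shape of ConsumeMap: accept every record, emit nothing.
public static class ConsumeMap extends
    Mapper<LongWritable, Text, LongWritable, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // Intentionally no context.write(...); output stops here.
  }
}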
 
Example #3
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests Reducer throwing exception.
 * 
 * @throws Exception
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
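FailReduce is likewise a fixture from the test class. A plausible minimal shape (an assumption, not the actual source): a reducer that throws on any input, which is what makes the job end unsuccessfully above.

// Assumed shape of FailReduce: fail deterministically on any record.
public static class FailReduce extends
    Reducer<LongWritable, Text, LongWritable, Text> {
  @Override
  protected void reduce(LongWritable key, Iterable<Text> values,
      Context context) throws IOException {
    throw new IOException("reduce failed on purpose");
  }
}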
 
Example #4
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests one of the mappers throwing exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
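FailMap follows the same idea on the map side. A plausible minimal shape (again an assumption, not the actual source); note the IntWritable output key, matching the types declared in the addMapper call above.

// Assumed shape of FailMap: throw before emitting anything, so the
// surrounding chain job must end unsuccessfully.
public static class FailMap extends
    Mapper<LongWritable, Text, IntWritable, Text> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException {
    throw new IOException("map failed on purpose");
  }
}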
 
Example #5
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests Reducer throwing exception.
 * 
 * @throws Exception
 */
public void testReducerFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example #6
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests reducer consuming output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example #7
Source File: TestJobOutputCommitter.java    From hadoop with Apache License 2.0
private void testFailedJob(String fileName,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createFailJob(conf, outDir, inDir);
  job.setOutputFormatClass(output);

  assertFalse("Job did not fail!", job.waitForCompletion(true));

  if (fileName != null) {
    Path testFile = new Path(outDir, fileName);
    assertTrue("File " + testFile + " missing for failed job " + job.getJobID(),
        fs.exists(testFile));
  }

  // check that files from the exclude set are absent
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for failed job "
        + job.getJobID(), fs.exists(file));
  }
}
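The fileName and exclude arguments encode which marker files the OutputFormat's committer is expected to leave behind after a failed job. The actual output formats live in TestJobOutputCommitter; one plausible shape, sketched under the assumption that job outcomes are marked with empty files (the class and marker names here are hypothetical):

// Hypothetical committer that drops marker files on commit and abort.
static class MarkingOutputCommitter extends FileOutputCommitter {
  MarkingOutputCommitter(Path outputPath, TaskAttemptContext context)
      throws IOException {
    super(outputPath, context);
  }

  @Override
  public void commitJob(JobContext context) throws IOException {
    super.commitJob(context);
    touch(context, "COMMIT_SUCCESS");   // present only after success
  }

  @Override
  public void abortJob(JobContext context, JobStatus.State state)
      throws IOException {
    super.abortJob(context, state);
    touch(context, "ABORT_" + state);   // e.g. ABORT_FAILED, ABORT_KILLED
  }

  private void touch(JobContext context, String name) throws IOException {
    Path marker = new Path(FileOutputFormat.getOutputPath(context), name);
    marker.getFileSystem(context.getConfiguration()).create(marker).close();
  }
}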
 
Example #8
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests one of the maps consuming output.
 * 
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example #9
Source File: TestSingleElementChain.java    From big-c with Apache License 2.0
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
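Because this chain has exactly one mapper and one reducer, it behaves like the word count you would get from plain job configuration; the chain API only pays off with multiple stages. For comparison, a sketch of the unchained equivalent, reusing the variables from the example above:

// Equivalent single-stage configuration without the chain API.
Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
job.setJobName("no-chain");
job.setMapperClass(TokenCounterMapper.class);
job.setReducerClass(IntSumReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);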
 
Example #10
Source File: TestWALRecordReader.java    From hbase with Apache License 2.0
/**
 * Create a new reader from the split, and match the edits against the passed columns.
 */
private void testSplit(InputSplit split, byte[]... columns) throws Exception {
  WALRecordReader<WALKey> reader = getReader();
  reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));

  for (byte[] column : columns) {
    assertTrue(reader.nextKeyValue());
    Cell cell = reader.getCurrentValue().getCells().get(0);
    if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
      cell.getQualifierOffset(), cell.getQualifierLength())) {
      fail("expected [" + Bytes.toString(column) + "], actual ["
          + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
              cell.getQualifierLength()) + "]");
    }
  }
  assertFalse(reader.nextKeyValue());
  reader.close();
}
 
Example #11
Source File: TestMRKeyValueTextInputFormat.java    From big-c with Apache License 2.0
private static List<Text> readSplit(KeyValueTextInputFormat format, 
    InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context created above
  RecordReader<Text, Text> reader = format.createRecordReader(split, context);
  MapContext<Text, Text, Text, Text> mcontext = 
    new MapContextImpl<Text, Text, Text, Text>(conf, 
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(), 
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();
  return result;
}
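The dummy-context-plus-MapContextImpl dance above recurs in several examples below (#16, #20, #21, #29, #30). A small generic helper, sketched here as an assumption rather than existing API, would factor it out:

// Hypothetical helper: initialize any RecordReader against a dummy map
// task context so it can be driven outside a real task.
static <K, V> RecordReader<K, V> openReader(InputFormat<K, V> format,
    InputSplit split, Configuration conf)
    throws IOException, InterruptedException {
  TaskAttemptContext context =
      MapReduceTestUtil.createDummyMapTaskAttemptContext(conf);
  RecordReader<K, V> reader = format.createRecordReader(split, context);
  MapContext<K, V, K, V> mcontext = new MapContextImpl<K, V, K, V>(conf,
      context.getTaskAttemptID(), reader, null, null,
      MapReduceTestUtil.createDummyReporter(), split);
  reader.initialize(split, mcontext);
  return reader;
}

Call sites would then shrink to reader = openReader(format, split, job.getConfiguration()) followed by the nextKeyValue() loop and close().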
 
Example #12
Source File: TestJobOutputCommitter.java    From hadoop with Apache License 2.0
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));

  // check that files from the exclude set are absent
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example #13
Source File: TestSingleElementChain.java    From hadoop with Apache License 2.0
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);

  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example #14
Source File: TestMapReduceJobControl.java    From big-c with Apache License 2.0
@Test(timeout = 30000)
public void testControlledJob() throws Exception {
  LOG.info("Starting testControlledJob");

  Configuration conf = createJobConf();
  cleanupData(conf);
  Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
  JobControl theControl = createDependencies(conf, job1);
  while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      break;
    }
  }
  Assert.assertNotNull(cjob1.getMapredJobId());

  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  assertEquals("Some jobs failed", 0, theControl.getFailedJobList().size());
  theControl.stop();
}
 
Example #15
Source File: TestMapReduceJobControl.java    From big-c with Apache License 2.0
public void testJobControlWithFailJob() throws Exception {
  LOG.info("Starting testJobControlWithFailJob");
  Configuration conf = createJobConf();

  cleanupData(conf);
  
  // create a Fail job
  Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
  
  // create job dependencies
  JobControl theControl = createDependencies(conf, job1);
  
  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  
  assertTrue(cjob1.getJobState() == ControlledJob.State.FAILED);
  assertTrue(cjob2.getJobState() == ControlledJob.State.SUCCESS);
  assertTrue(cjob3.getJobState() == ControlledJob.State.DEPENDENT_FAILED);
  assertTrue(cjob4.getJobState() == ControlledJob.State.DEPENDENT_FAILED);

  theControl.stop();
}
 
Example #16
Source File: TestMRKeyValueTextInputFormat.java    From hadoop with Apache License 2.0
private static List<Text> readSplit(KeyValueTextInputFormat format, 
    InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context created above
  RecordReader<Text, Text> reader = format.createRecordReader(split, context);
  MapContext<Text, Text, Text, Text> mcontext = 
    new MapContextImpl<Text, Text, Text, Text>(conf, 
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(), 
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();
  return result;
}
 
Example #17
Source File: TestJobOutputCommitter.java    From big-c with Apache License 2.0
private void testFailedJob(String fileName,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createFailJob(conf, outDir, inDir);
  job.setOutputFormatClass(output);

  assertFalse("Job did not fail!", job.waitForCompletion(true));

  if (fileName != null) {
    Path testFile = new Path(outDir, fileName);
    assertTrue("File " + testFile + " missing for failed job " + job.getJobID(),
        fs.exists(testFile));
  }

  // check that files from the exclude set are absent
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for failed job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example #18
Source File: TestJobOutputCommitter.java    From big-c with Apache License 2.0
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);

  assertTrue("Job failed!", job.waitForCompletion(true));

  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));

  // check that files from the exclude set are absent
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example #19
Source File: TestChainErrors.java    From hadoop with Apache License 2.0
/**
 * Tests reducer consuming output.
 * 
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);

  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
 
Example #20
Source File: TestCombineTextInputFormat.java    From hadoop with Apache License 2.0
private static List<Text> readSplit(InputFormat<LongWritable,Text> format,
  InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context created above
  RecordReader<LongWritable, Text> reader =
    format.createRecordReader(split, context);
  MapContext<LongWritable,Text,LongWritable,Text> mcontext =
    new MapContextImpl<LongWritable,Text,LongWritable,Text>(conf,
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(),
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();  // close the reader, as the other readSplit variants do
  return result;
}
 
Example #21
Source File: TestCombineTextInputFormat.java    From big-c with Apache License 2.0
private static List<Text> readSplit(InputFormat<LongWritable,Text> format,
  InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context created above
  RecordReader<LongWritable, Text> reader =
    format.createRecordReader(split, context);
  MapContext<LongWritable,Text,LongWritable,Text> mcontext =
    new MapContextImpl<LongWritable,Text,LongWritable,Text>(conf,
    context.getTaskAttemptID(), reader, null, null,
    MapReduceTestUtil.createDummyReporter(),
    split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();  // close the reader, as the other readSplit variants do
  return result;
}
 
Example #22
Source File: TestMapReduceJobControl.java    From hadoop with Apache License 2.0
public void testJobControlWithFailJob() throws Exception {
  LOG.info("Starting testJobControlWithFailJob");
  Configuration conf = createJobConf();

  cleanupData(conf);
  
  // create a Fail job
  Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
  
  // create job dependencies
  JobControl theControl = createDependencies(conf, job1);
  
  // wait till all the jobs complete
  waitTillAllFinished(theControl);
  
  assertTrue(cjob1.getJobState() == ControlledJob.State.FAILED);
  assertTrue(cjob2.getJobState() == ControlledJob.State.SUCCESS);
  assertTrue(cjob3.getJobState() == ControlledJob.State.DEPENDENT_FAILED);
  assertTrue(cjob4.getJobState() == ControlledJob.State.DEPENDENT_FAILED);

  theControl.stop();
}
 
Example #23
Source File: TestChainErrors.java    From big-c with Apache License 2.0
/**
 * Tests one of the mappers throwing exception.
 * 
 * @throws Exception
 */
public void testChainFail() throws Exception {

  Configuration conf = createJobConf();

  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");

  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);

  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);

  job.waitForCompletion(true);
  assertTrue("Job Not failed", !job.isSuccessful());
}
 
Example #24
Source File: TestMRFieldSelection.java    From big-c with Apache License 2.0
public static void launch() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  int numOfInputLines = 10;

  Path outDir = new Path(testDir, "output_for_field_selection_test");
  Path inDir = new Path(testDir, "input_for_field_selection_test");

  StringBuffer inputData = new StringBuffer();
  StringBuffer expectedOutput = new StringBuffer();
  constructInputOutputData(inputData, expectedOutput, numOfInputLines);
  
  conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
  conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
  conf.set(
    FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
    1, 1, inputData.toString());
  job.setMapperClass(FieldSelectionMapper.class);
  job.setReducerClass(FieldSelectionReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(Text.class);
  job.setNumReduceTasks(1);

  job.waitForCompletion(true);
  assertTrue("Job Failed!", job.isSuccessful());

  // Finally, compare the job output with the expected output.
  String outdata = MapReduceTestUtil.readOutput(outDir, conf);
  assertEquals("Outputs doesnt match.",expectedOutput.toString(), outdata);
  fs.delete(outDir, true);
}
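A note on the specs, following the FieldSelectionHelper format (key spec and value spec separated by ':', field ranges like "1-3", and a trailing "n-" meaning field n through the end of the line): the map spec "6,5,1-3:0-" builds keys from fields 6, 5, and 1 through 3 and values from field 0 onward, while the reduce spec ":4,3,2,1,0,0-" leaves the key empty and builds values from fields 4, 3, 2, 1, 0 followed by field 0 onward. This reading is an interpretation; consult constructInputOutputData in the test for the exact expected strings.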
 
Example #25
Source File: TestJobOutputCommitter.java    From big-c with Apache License 2.0
private void testKilledJob(String fileName,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createKillJob(conf, outDir, inDir);
  job.setOutputFormatClass(output);

  job.submit();

  // wait for the setup to be completed
  while (job.setupProgress() != 1.0f) {
    UtilsForTests.waitFor(100);
  }

  job.killJob(); // kill the job

  assertFalse("Job did not get kill", job.waitForCompletion(true));

  if (fileName != null) {
    Path testFile = new Path(outDir, fileName);
    assertTrue("File " + testFile + " missing for job " + job.getJobID(), fs
        .exists(testFile));
  }

  // check that files from the exclude set are absent
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for killed job "
        + job.getJobID(), fs.exists(file));
  }
}
 
Example #26
Source File: TestWALRecordReader.java    From hbase with Apache License 2.0
/**
 * Create a new reader from the split, match the edits against the passed
 * columns, and move the WAL to the archive directory between reads.
 */
private void testSplitWithMovingWAL(InputSplit split, byte[] col1, byte[] col2) throws Exception {
  WALRecordReader<WALKey> reader = getReader();
  reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));

  assertTrue(reader.nextKeyValue());
  Cell cell = reader.getCurrentValue().getCells().get(0);
  if (!Bytes.equals(col1, 0, col1.length, cell.getQualifierArray(),
      cell.getQualifierOffset(), cell.getQualifierLength())) {
    fail("expected [" + Bytes.toString(col1) + "], actual ["
        + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
            cell.getQualifierLength()) + "]");
  }
  // Move the log file to the archive directory while the WAL record
  // reader is still open.
  WALInputFormat.WALSplit split_ = (WALInputFormat.WALSplit) split;

  Path logFile = new Path(split_.getLogFileName());
  Path archivedLog = AbstractFSWALProvider.getArchivedLogPath(logFile, conf);
  boolean result = fs.rename(logFile, archivedLog);
  assertTrue(result);
  result = fs.exists(archivedLog);
  assertTrue(result);
  assertTrue(reader.nextKeyValue());
  cell = reader.getCurrentValue().getCells().get(0);
  if (!Bytes.equals(col2, 0, col2.length, cell.getQualifierArray(),
      cell.getQualifierOffset(), cell.getQualifierLength())) {
    fail("expected [" + Bytes.toString(col2) + "], actual ["
        + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
            cell.getQualifierLength()) + "]");
  }
  reader.close();
}
 
Example #27
Source File: TestStreamingStatus.java    From hadoop with Apache License 2.0
void validateTaskStderr(StreamJob job, TaskType type)
    throws IOException {
  TaskAttemptID attemptId =
      new TaskAttemptID(new TaskID(job.jobId_, type, 0), 0);

  String log = MapReduceTestUtil.readTaskLog(TaskLog.LogName.STDERR,
      attemptId, false);

  // trim() is applied to expectedStderr because
  // MapReduceTestUtil.readTaskLog() returns a trimmed String.
  assertEquals(expectedStderr.trim(), log);
}
 
Example #28
Source File: TestStreamingStatus.java    From hadoop with Apache License 2.0
void validateJobOutput(Configuration conf)
    throws IOException {

  String output = MapReduceTestUtil.readOutput(
      new Path(OUTPUT_DIR), conf).trim();

  assertEquals(expectedOutput, output);
}
 
Example #29
Source File: TestDistCacheEmulation.java    From hadoop with Apache License 2.0
/**
 * Validate setupGenerateDistCacheData by checking (1) the permissions of
 * the distributed cache directories and (2) the content of the generated
 * sequence file, including dist cache file paths and their file sizes.
 */
private void validateSetupGenDC(Configuration jobConf, long[] sortedFileSizes)
    throws IOException, InterruptedException {
  // build things needed for validation
  long sumOfFileSizes = 0;
  for (int i = 0; i < sortedFileSizes.length; i++) {
    sumOfFileSizes += sortedFileSizes[i];
  }

  FileSystem fs = FileSystem.get(jobConf);
  assertEquals("Number of distributed cache files to be generated is wrong.",
      sortedFileSizes.length,
      jobConf.getInt(GenerateDistCacheData.GRIDMIX_DISTCACHE_FILE_COUNT, -1));
  assertEquals("Total size of dist cache files to be generated is wrong.",
      sumOfFileSizes,
      jobConf.getLong(GenerateDistCacheData.GRIDMIX_DISTCACHE_BYTE_COUNT, -1));
  Path filesListFile = new Path(
      jobConf.get(GenerateDistCacheData.GRIDMIX_DISTCACHE_FILE_LIST));
  FileStatus stat = fs.getFileStatus(filesListFile);
  assertEquals("Wrong permissions of dist Cache files list file "
      + filesListFile, new FsPermission((short) 0644), stat.getPermission());

  InputSplit split = new FileSplit(filesListFile, 0, stat.getLen(),
      (String[]) null);
  TaskAttemptContext taskContext = MapReduceTestUtil
      .createDummyMapTaskAttemptContext(jobConf);
  RecordReader<LongWritable, BytesWritable> reader = new GenerateDistCacheData.GenDCDataFormat()
      .createRecordReader(split, taskContext);
  MapContext<LongWritable, BytesWritable, NullWritable, BytesWritable> mapContext = new MapContextImpl<LongWritable, BytesWritable, NullWritable, BytesWritable>(
      jobConf, taskContext.getTaskAttemptID(), reader, null, null,
      MapReduceTestUtil.createDummyReporter(), split);
  reader.initialize(split, mapContext);

  // start validating setupGenerateDistCacheData
  doValidateSetupGenDC(reader, fs, sortedFileSizes);
}
 
Example #30
Source File: TestFixedLengthInputFormat.java    From big-c with Apache License 2.0
/**
 * Test with record length set to 0
 */
@Test (timeout=5000)
public void testZeroRecordLength() throws Exception {
  localFs.delete(workDir, true);
  Path file = new Path(workDir, "testFormat.txt");
  createFile(file, null, 10, 10);
  Job job = Job.getInstance(defaultConf);
  // Set the fixed length record length config property 
  FixedLengthInputFormat format = new FixedLengthInputFormat();
  format.setRecordLength(job.getConfiguration(), 0);
  FileInputFormat.setInputPaths(job, workDir);
  List<InputSplit> splits = format.getSplits(job);
  boolean exceptionThrown = false;
  for (InputSplit split : splits) {
    try {
      TaskAttemptContext context =
          MapReduceTestUtil.createDummyMapTaskAttemptContext(
          job.getConfiguration());
      RecordReader<LongWritable, BytesWritable> reader = 
          format.createRecordReader(split, context);
      MapContext<LongWritable, BytesWritable, LongWritable, BytesWritable>
          mcontext =
          new MapContextImpl<LongWritable, BytesWritable, LongWritable,
          BytesWritable>(job.getConfiguration(), context.getTaskAttemptID(),
          reader, null, null, MapReduceTestUtil.createDummyReporter(), split);
      reader.initialize(split, mcontext);
    } catch(IOException ioe) {
      exceptionThrown = true;
      LOG.info("Exception message:" + ioe.getMessage());
    }
  }
  assertTrue("Exception for zero record length:", exceptionThrown);
}
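For contrast with the zero-length failure path above, here is a sketch of the valid configuration, assuming (per the createFile(file, null, 10, 10) call) the test file holds 10-byte records:

// With a positive record length the reader initializes normally and
// yields fixed-size values instead of throwing in initialize().
FixedLengthInputFormat format = new FixedLengthInputFormat();
format.setRecordLength(job.getConfiguration(), 10);
FileInputFormat.setInputPaths(job, workDir);
for (InputSplit split : format.getSplits(job)) {
  TaskAttemptContext context = MapReduceTestUtil
      .createDummyMapTaskAttemptContext(job.getConfiguration());
  RecordReader<LongWritable, BytesWritable> reader =
      format.createRecordReader(split, context);
  reader.initialize(split, context); // a plain dummy context suffices here
  while (reader.nextKeyValue()) {
    assertEquals(10, reader.getCurrentValue().getLength());
  }
  reader.close();
}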